commit 906e9ccf6ba7c18008e27f5587a4a83cc67f4189
Author: Benjamin Banaskiewicz
Date: Thu Apr 20 08:44:01 2023 +0000
[installation] Initial commit with first config
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..fa1d8ac
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,5 @@
+var
+*.pyc
+HYPO*
+RESET*
+share/maps
diff --git a/bin/Hypo71PC b/bin/Hypo71PC
new file mode 100755
index 0000000..6b12024
Binary files /dev/null and b/bin/Hypo71PC differ
diff --git a/bin/arclink2inv b/bin/arclink2inv
new file mode 100755
index 0000000..4ab50d6
--- /dev/null
+++ b/bin/arclink2inv
@@ -0,0 +1,82 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import seiscomp.datamodel
+import seiscomp.io
+import getopt
+import sys
+
+
+usage = """arclink2inv [options] input=stdin output=stdout
+
+Options:
+ -h [ --help ] Produce help message
+ -f [ --formatted ] Enable formatted XML output
+"""
+
+
+def main(argv):
+ imp = seiscomp.io.Importer.Create("arclink")
+ if imp is None:
+ sys.stderr.write("Arclink import not available\n")
+ return 1
+
+ formatted = False
+
+ # parse command line options
+ try:
+ opts, args = getopt.getopt(argv[1:], "hf", ["help", "formatted"])
+ except getopt.error as msg:
+ sys.stderr.write("%s\n" % msg)
+ sys.stderr.write("for help use --help\n")
+ return 1
+
+ for o, a in opts:
+ if o in ["-h", "--help"]:
+ sys.stderr.write("%s\n" % usage)
+ return 1
+ elif o in ["-f", "--formatted"]:
+ formatted = True
+
+ argv = args
+
+ if len(argv) > 0:
+ o = imp.read(argv[0])
+ else:
+ o = imp.read("-")
+
+ inv = seiscomp.datamodel.Inventory.Cast(o)
+ if inv is None:
+ sys.stderr.write("No inventory found\n")
+ return 1
+
+ ar = seiscomp.io.XMLArchive()
+ if len(argv) > 1:
+ res = ar.create(argv[1])
+ else:
+ res = ar.create("-")
+
+ if not res:
+ sys.stderr.write("Failed to open output\n")
+ return 1
+
+ ar.setFormattedOutput(formatted)
+ ar.writeObject(inv)
+ ar.close()
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv))
diff --git a/bin/bindings2cfg b/bin/bindings2cfg
new file mode 100755
index 0000000..bae2a58
--- /dev/null
+++ b/bin/bindings2cfg
@@ -0,0 +1,26 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+
+############################################################################
+# Copyright (C) gempa GmbH #
+# All rights reserved. #
+# Contact: gempa GmbH (seiscomp-dev@gempa.de) #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+# #
+# Other Usage #
+# Alternatively, this file may be used in accordance with the terms and #
+# conditions contained in a signed written agreement between you and #
+# gempa GmbH. #
+############################################################################
+
+import seiscomp.bindings2cfg
+import sys
+
+sys.exit(seiscomp.bindings2cfg.main())
diff --git a/bin/dlsv2inv b/bin/dlsv2inv
new file mode 100755
index 0000000..dd3be2f
Binary files /dev/null and b/bin/dlsv2inv differ
diff --git a/bin/ew2sc b/bin/ew2sc
new file mode 100755
index 0000000..730f96a
Binary files /dev/null and b/bin/ew2sc differ
diff --git a/bin/extr_file b/bin/extr_file
new file mode 100755
index 0000000..9cf416f
--- /dev/null
+++ b/bin/extr_file
@@ -0,0 +1,28 @@
+#!/usr/bin/env seiscomp-python
+
+from __future__ import print_function
+import sys
+from seiscomp import mseedlite as mseed
+
+open_files = {}
+
+if len(sys.argv) != 2:
+ print("Usage: extr_file FILE")
+ sys.exit(1)
+
+for rec in mseed.Input(open(sys.argv[1], "rb")):
+ oname = "%s.%s.%s.%s" % (rec.sta, rec.net, rec.loc, rec.cha)
+
+ if oname not in open_files:
+ postfix = ".D.%04d.%03d.%02d%02d" % (rec.begin_time.year,
+ rec.begin_time.timetuple()[7], rec.begin_time.hour,
+ rec.begin_time.minute)
+
+ open_files[oname] = open(oname + postfix, "ab")
+
+ ofile = open_files[oname]
+ ofile.write(rec.header + rec.data)
+
+for oname in open_files:
+ open_files[oname].close()
+
diff --git a/bin/fdsnws b/bin/fdsnws
new file mode 100755
index 0000000..5cd594c
--- /dev/null
+++ b/bin/fdsnws
@@ -0,0 +1,1482 @@
+#!/usr/bin/env seiscomp-python
+
+################################################################################
+# Copyright (C) 2013-2014 gempa GmbH
+#
+# FDSNWS -- Implements FDSN Web Service interface, see
+# http://www.fdsn.org/webservices/
+#
+# Implemented Services:
+# fdsnws-dataselect
+# fdsnws-event
+# fdsnws-station
+# fdsnws-availability
+#
+# Author: Stephan Herrnkind
+# Email: herrnkind@gempa.de
+###############################################################################
+
+from __future__ import absolute_import, division, print_function
+
+import base64
+import fnmatch
+import os
+import re
+import signal
+import sys
+import time
+
+try:
+ from twisted.cred import checkers, credentials, error, portal
+ from twisted.internet import reactor, defer, task
+ from twisted.web import guard, static
+ from twisted.python import log, failure
+ from zope.interface import implementer
+except ImportError as e:
+ sys.exit("%s\nIs python twisted installed?" % str(e))
+
+import seiscomp.core
+import seiscomp.datamodel
+import seiscomp.io
+import seiscomp.logging
+import seiscomp.client
+import seiscomp.system
+
+
+from seiscomp.fdsnws.utils import isRestricted, py3ustr, py3bstr
+from seiscomp.fdsnws.dataselect import FDSNDataSelect, FDSNDataSelectRealm, \
+ FDSNDataSelectAuthRealm
+from seiscomp.fdsnws.dataselect import VERSION as DataSelectVersion
+from seiscomp.fdsnws.event import FDSNEvent
+from seiscomp.fdsnws.event import VERSION as EventVersion
+from seiscomp.fdsnws.station import FDSNStation
+from seiscomp.fdsnws.station import VERSION as StationVersion
+from seiscomp.fdsnws.availability import FDSNAvailabilityQuery, \
+ FDSNAvailabilityQueryRealm, FDSNAvailabilityQueryAuthRealm, \
+ FDSNAvailabilityExtent, FDSNAvailabilityExtentRealm, \
+ FDSNAvailabilityExtentAuthRealm
+from seiscomp.fdsnws.availability import VERSION as AvailabilityVersion
+from seiscomp.fdsnws.http import DirectoryResource, ListingResource, \
+ NoResource, Site, ServiceVersion, AuthResource, WADLFilter
+from seiscomp.fdsnws.log import Log
+
+
+def logSC3(entry):
+ try:
+ isError = entry['isError']
+ msg = entry['message']
+ if isError:
+ for l in msg:
+ seiscomp.logging.error("[reactor] %s" % l)
+ else:
+ for l in msg:
+ seiscomp.logging.info("[reactor] %s" % l)
+ except Exception:
+ pass
+
+
+###############################################################################
+# Make CORS work with queryauth
+class HTTPAuthSessionWrapper(guard.HTTPAuthSessionWrapper):
+ def __init__(self, *args, **kwargs):
+ guard.HTTPAuthSessionWrapper.__init__(self, *args, **kwargs)
+
+ def render(self, request):
+ if request.method == b'OPTIONS':
+ request.setHeader(b'Allow', b'GET,HEAD,POST,OPTIONS')
+ return b''
+
+ return guard.HTTPAuthSessionWrapper.render(self, request)
+
+
+###############################################################################
+@implementer(checkers.ICredentialsChecker)
+class UsernamePasswordChecker(object):
+
+ credentialInterfaces = (credentials.IUsernamePassword,
+ credentials.IUsernameHashedPassword)
+
+ # -------------------------------------------------------------------------
+ def __init__(self, userdb):
+ self.__userdb = userdb
+
+ # -------------------------------------------------------------------------
+ @staticmethod
+ def __cbPasswordMatch(matched, username):
+ if matched:
+ return username
+
+ return failure.Failure(error.UnauthorizedLogin())
+
+ # -------------------------------------------------------------------------
+ def requestAvatarId(self, cred):
+ return defer.maybeDeferred(self.__userdb.checkPassword, cred) \
+ .addCallback(self.__cbPasswordMatch, cred.username)
+
+
+###############################################################################
+class UserDB(object):
+
+ # -------------------------------------------------------------------------
+ def __init__(self):
+ self.__users = {}
+ self.__blacklist = set()
+ task.LoopingCall(self.__expireUsers).start(60, False)
+
+ # -------------------------------------------------------------------------
+ def __expireUsers(self):
+ for (name, (_, _, expires)) in list(self.__users.items()):
+ if time.time() > expires:
+ seiscomp.logging.info("de-registering %s" % name)
+ del self.__users[name]
+
+ # -------------------------------------------------------------------------
+ def blacklistUser(self, name):
+ seiscomp.logging.info("blacklisting %s" % name)
+ self.__blacklist.add(name)
+
+ # -------------------------------------------------------------------------
+ def addUser(self, name, attributes, expires, data):
+ try:
+ password = self.__users[name][0]
+
+ except KeyError:
+ bl = " (blacklisted)" if name in self.__blacklist else ""
+ seiscomp.logging.notice("registering %s%s %s" % (name, bl, data))
+ password = base64.urlsafe_b64encode(os.urandom(12))
+
+ attributes['blacklisted'] = name in self.__blacklist
+ self.__users[name] = (password, attributes, expires)
+ return password
+
+ # -------------------------------------------------------------------------
+ def checkPassword(self, cred):
+ try:
+ pw = self.__users[cred.username][0]
+
+ except KeyError:
+ return False
+
+ return cred.checkPassword(pw)
+
+ # -------------------------------------------------------------------------
+ def getAttributes(self, name):
+ return self.__users[name][1]
+
+ # -------------------------------------------------------------------------
+ def dump(self):
+ seiscomp.logging.info("known users:")
+
+ for name, user in list(self.__users.items()):
+ seiscomp.logging.info(" %s %s %d" % (py3ustr(name),
+ user[1], user[2]))
+
+
+###############################################################################
+class Access(object):
+
+ # -------------------------------------------------------------------------
+ def __init__(self):
+ self.__access = {}
+
+ # -------------------------------------------------------------------------
+ def initFromSC3Routing(self, routing):
+ for i in range(routing.accessCount()):
+ acc = routing.access(i)
+ net = acc.networkCode()
+ sta = acc.stationCode()
+ loc = acc.locationCode()
+ cha = acc.streamCode()
+ user = acc.user()
+ start = acc.start()
+
+ try:
+ end = acc.end()
+
+ except ValueError:
+ end = None
+
+ self.__access.setdefault((net, sta, loc, cha), []) \
+ .append((user, start, end))
+
+ # -------------------------------------------------------------------------
+ @staticmethod
+ def __matchTime(t1, t2, accessStart, accessEnd):
+ return (not accessStart or (t1 and t1 >= accessStart)) and \
+ (not accessEnd or (t2 and t2 <= accessEnd))
+
+ # -------------------------------------------------------------------------
+ @staticmethod
+ def __matchEmail(emailAddress, accessUser):
+ defaultPrefix = "mail:"
+
+ if accessUser.startswith(defaultPrefix):
+ accessUser = accessUser[len(defaultPrefix):]
+
+ return emailAddress.upper() == accessUser.upper() or (
+ accessUser[:1] == '@' and emailAddress[:1] != '@' and
+ emailAddress.upper().endswith(accessUser.upper()))
+
+ # -------------------------------------------------------------------------
+ @staticmethod
+ def __matchAttribute(attribute, accessUser):
+ return attribute.upper() == accessUser.upper()
+
+ # -------------------------------------------------------------------------
+ def authorize(self, user, net, sta, loc, cha, t1, t2):
+ if user['blacklisted']:
+ return False
+
+ matchers = []
+
+ try:
+ # OID 0.9.2342.19200300.100.1.3 (RFC 2798)
+ emailAddress = user['mail']
+ matchers.append((self.__matchEmail, emailAddress))
+
+ except KeyError:
+ pass
+
+ try:
+ # B2ACCESS
+ for memberof in user['memberof'].split(';'):
+ matchers.append((self.__matchAttribute, "group:" + memberof))
+
+ except KeyError:
+ pass
+
+ for m in matchers:
+ for (u, start, end) in self.__access.get((net, '', '', ''), []):
+ if self.__matchTime(t1, t2, start, end) and m[0](m[1], u):
+ return True
+
+ for (u, start, end) in self.__access.get((net, sta, '', ''), []):
+ if self.__matchTime(t1, t2, start, end) and m[0](m[1], u):
+ return True
+
+ for (u, start, end) in self.__access.get((net, sta, loc, cha), []):
+ if self.__matchTime(t1, t2, start, end) and m[0](m[1], u):
+ return True
+
+ return False
+
+
+###############################################################################
+class DataAvailabilityCache(object):
+
+ # -------------------------------------------------------------------------
+ def __init__(self, app, da, validUntil):
+ self._da = da
+ self._validUntil = validUntil
+ self._extents = {}
+ self._extentsSorted = []
+ self._extentsOID = {}
+
+ for i in range(self._da.dataExtentCount()):
+ ext = self._da.dataExtent(i)
+ wid = ext.waveformID()
+ sid = "%s.%s.%s.%s" % (wid.networkCode(), wid.stationCode(),
+ wid.locationCode(), wid.channelCode())
+ restricted = app._openStreams is None or sid not in app._openStreams
+ if restricted and not app._allowRestricted:
+ continue
+ self._extents[sid] = (ext, restricted)
+ # seiscomp.logging.debug("%s: %s ~ %s" % (sid, ext.start().iso(),
+ # ext.end().iso()))
+
+ if app._serveAvailability:
+ # load data attribute extents if availability is served
+ for i in range(da.dataExtentCount()):
+ extent = da.dataExtent(i)
+ app.query().loadDataAttributeExtents(extent)
+
+ # create a list of (extent, oid, restricted) tuples sorted by stream
+ self._extentsSorted = [(e, app.query().getCachedId(e), res)
+ for wid, (e, res) in sorted(
+ self._extents.items(),
+ key=lambda t: t[0])]
+
+ # create a dictionary of object ID to extents
+ self._extentsOID = dict((oid, (e, res))
+ for (e, oid, res) in self._extentsSorted)
+
+ seiscomp.logging.info("loaded %i extents" % len(self._extents))
+
+ # -------------------------------------------------------------------------
+ def validUntil(self):
+ return self._validUntil
+
+ # -------------------------------------------------------------------------
+ def extent(self, net, sta, loc, cha):
+ wid = "%s.%s.%s.%s" % (net, sta, loc, cha)
+ if wid in self._extents:
+ return self._extents[wid][0]
+
+ return None
+
+ # -------------------------------------------------------------------------
+ def extents(self):
+ return self._extents
+
+ # -------------------------------------------------------------------------
+ def extentsSorted(self):
+ return self._extentsSorted
+
+ # -------------------------------------------------------------------------
+ def extentsOID(self):
+ return self._extentsOID
+
+ # -------------------------------------------------------------------------
+ def dataAvailability(self):
+ return self._da
+
+
+###############################################################################
+class FDSNWS(seiscomp.client.Application):
+
+ # -------------------------------------------------------------------------
+ def __init__(self, argc, argv):
+ seiscomp.client.Application.__init__(self, argc, argv)
+ self.setMessagingEnabled(True)
+ self.setDatabaseEnabled(True, True)
+ self.setRecordStreamEnabled(True)
+ self.setLoadInventoryEnabled(True)
+
+ self._serverRoot = os.path.dirname(__file__)
+ self._listenAddress = '0.0.0.0' # all interfaces
+ self._port = 8080
+ self._connections = 5
+ self._queryObjects = 100000 # maximum number of objects per query
+ self._realtimeGap = None # minimum data age: 5min
+ self._samplesM = None # maximum number of samples per query
+ self._recordBulkSize = 102400 # desired record bulk size
+ self._htpasswd = '@CONFIGDIR@/fdsnws.htpasswd'
+ self._accessLogFile = ''
+ self._requestLogFile = ''
+ self._userSalt = ''
+ self._corsOrigins = ['*']
+
+ self._allowRestricted = True
+ self._useArclinkAccess = False
+ self._serveDataSelect = True
+ self._serveEvent = True
+ self._serveStation = True
+ self._serveAvailability = False
+ self._daEnabled = False
+ self._daCacheDuration = 300
+ self._daCache = None
+ self._openStreams = None
+ self._daRepositoryName = 'primary'
+ self._daDCCName = 'DCC'
+ self._handleConditionalRequests = False
+
+ self._hideAuthor = False
+ self._hideComments = False
+ self._evaluationMode = None
+ self._eventTypeWhitelist = None
+ self._eventTypeBlacklist = None
+ self._eventFormats = None
+ self._stationFilter = None
+ self._dataSelectFilter = None
+ self._debugFilter = False
+
+ self._accessLog = None
+
+ self._fileNamePrefix = 'fdsnws'
+
+ self._trackdbEnabled = False
+ self._trackdbDefaultUser = 'fdsnws'
+
+ self._authEnabled = False
+ self._authGnupgHome = '@ROOTDIR@/var/lib/gpg'
+ self._authBlacklist = []
+
+ self._userdb = UserDB()
+ self._access = None
+ self._checker = None
+
+ self._requestLog = None
+ self.__reloadRequested = False
+ self.__timeInventoryLoaded = None
+ self.__tcpPort = None
+
+ # Leave signal handling to us
+ seiscomp.client.Application.HandleSignals(False, False)
+
+ # -------------------------------------------------------------------------
+ def initConfiguration(self):
+ if not seiscomp.client.Application.initConfiguration(self):
+ return False
+
+ # bind address and port
+ try:
+ self._listenAddress = self.configGetString('listenAddress')
+ except Exception:
+ pass
+ try:
+ self._port = self.configGetInt('port')
+ except Exception:
+ pass
+
+ # maximum number of connections
+ try:
+ self._connections = self.configGetInt('connections')
+ except Exception:
+ pass
+
+ # maximum number of objects per query, used in fdsnws-station and
+ # fdsnws-event to limit main memory consumption
+ try:
+ self._queryObjects = self.configGetInt('queryObjects')
+ except Exception:
+ pass
+
+ # restrict end time of request to now-realtimeGap seconds, used in
+ # fdsnws-dataselect
+ try:
+ self._realtimeGap = self.configGetInt('realtimeGap')
+ except Exception:
+ pass
+
+ # maximum number of samples (in units of million) per query, used in
+ # fdsnws-dataselect to limit bandwidth
+ try:
+ self._samplesM = self.configGetDouble('samplesM')
+ except Exception:
+ pass
+
+ try:
+ self._recordBulkSize = self.configGetInt('recordBulkSize')
+ except Exception:
+ pass
+
+ if self._recordBulkSize < 1:
+ print("Invalid recordBulkSize, must be larger than 0",
+ file=sys.stderr)
+ return False
+
+ # location of htpasswd file
+ try:
+ self._htpasswd = self.configGetString('htpasswd')
+ except Exception:
+ pass
+ self._htpasswd = seiscomp.system.Environment.Instance() \
+ .absolutePath(self._htpasswd)
+
+ # location of access log file
+ try:
+ self._accessLogFile = seiscomp.system.Environment.Instance() \
+ .absolutePath(self.configGetString('accessLog'))
+ except Exception:
+ pass
+
+ # location of request log file
+ try:
+ self._requestLogFile = seiscomp.system.Environment.Instance() \
+ .absolutePath(self.configGetString('requestLog'))
+ except Exception:
+ pass
+
+ # user salt
+ try:
+ self._userSalt = self.configGetString('userSalt')
+ except Exception:
+ pass
+
+ # list of allowed CORS origins
+ try:
+ self._corsOrigins = list(filter(None,
+ self.configGetStrings('corsOrigins')))
+ except Exception:
+ pass
+
+ # access to restricted inventory information
+ try:
+ self._allowRestricted = self.configGetBool('allowRestricted')
+ except Exception:
+ pass
+
+ # time-based conditional requests handled by fdsnws-station
+ try:
+ self._handleConditionalRequests = \
+ self.configGetBool('handleConditionalRequests')
+ except Exception:
+ pass
+
+ # use arclink-access bindings
+ try:
+ self._useArclinkAccess = self.configGetBool('useArclinkAccess')
+ except Exception:
+ pass
+
+ # services to enable
+ try:
+ self._serveDataSelect = self.configGetBool('serveDataSelect')
+ except Exception:
+ pass
+ try:
+ self._serveEvent = self.configGetBool('serveEvent')
+ except Exception:
+ pass
+ try:
+ self._serveStation = self.configGetBool('serveStation')
+ except Exception:
+ pass
+ try:
+ self._serveAvailability = self.configGetBool('serveAvailability')
+ except Exception:
+ pass
+
+ # data availability
+ try:
+ self._daEnabled = self.configGetBool('dataAvailability.enable')
+ except Exception:
+ pass
+ try:
+ self._daCacheDuration = self.configGetInt(
+ 'dataAvailability.cacheDuration')
+ except Exception:
+ pass
+ try:
+ self._daRepositoryName = self.configGetString(
+ 'dataAvailability.repositoryName')
+ except Exception:
+ pass
+ try:
+ self._daDCCName = self.configGetString('dataAvailability.dccName')
+ except Exception:
+ pass
+
+ if self._serveAvailability and not self._daEnabled:
+ print("can't serve availabilty without dataAvailability.enable "
+ "set to true", file=sys.stderr)
+ return False
+ if not bool(re.match(r'^[a-zA-Z0-9_\ -]*$', self._daRepositoryName)):
+ print("invalid characters in dataAvailability.repositoryName",
+ file=sys.stderr)
+ return False
+ if not bool(re.match(r'^[a-zA-Z0-9_\ -]*$', self._daDCCName)):
+ print("invalid characters in dataAvailability.dccName",
+ file=sys.stderr)
+ return False
+
+ # event filter
+ try:
+ self._hideAuthor = self.configGetBool('hideAuthor')
+ except Exception:
+ pass
+ try:
+ self._hideComments = self.configGetBool('hideComments')
+ except Exception:
+ pass
+ try:
+ name = self.configGetString('evaluationMode')
+ if name.lower() == seiscomp.datamodel.EEvaluationModeNames.name(
+ seiscomp.datamodel.MANUAL):
+ self._evaluationMode = seiscomp.datamodel.MANUAL
+ elif name.lower() == seiscomp.datamodel.EEvaluationModeNames.name(
+ seiscomp.datamodel.AUTOMATIC):
+ self._evaluationMode = seiscomp.datamodel.AUTOMATIC
+ else:
+ print("invalid evaluation mode string: %s" % name,
+ file=sys.stderr)
+ return False
+ except Exception:
+ pass
+ try:
+ strings = self.configGetStrings('eventType.whitelist')
+ if len(strings) > 1 or strings[0]:
+ try:
+ self._eventTypeWhitelist = self._parseEventTypes(strings)
+ except Exception as e:
+ print("error parsing eventType.whitelist: %s" % str(e),
+ file=sys.stderr)
+ return False
+ except Exception:
+ pass
+ try:
+ strings = self.configGetStrings('eventType.blacklist')
+ if len(strings) > 1 or strings[0]:
+ try:
+ self._eventTypeBlacklist = self._parseEventTypes(strings)
+ if self._eventTypeWhitelist:
+ lenBefore = len(self._eventTypeWhitelist)
+ diff = self._eventTypeWhitelist.difference(
+ self._eventTypeBlacklist)
+ overlapCount = lenBefore - len(diff)
+ if overlapCount > 0:
+ self._eventTypeWhitelist = diff
+ print("warning: found %i overlapping event "
+ "types in white and black list, black "
+ "list takes precedence" % overlapCount,
+ file=sys.stderr)
+ except Exception as e:
+ print("error parsing eventType.blacklist: %s" % str(e),
+ file=sys.stderr)
+ return False
+ except Exception:
+ pass
+ try:
+ strings = self.configGetStrings('eventFormats')
+ if len(strings) > 1 or strings[0]:
+ self._eventFormats = [s.lower() for s in strings]
+ except Exception:
+ pass
+
+ # station filter
+ try:
+ self._stationFilter = seiscomp.system.Environment.Instance() \
+ .absolutePath(self.configGetString('stationFilter'))
+ except Exception:
+ pass
+
+ # dataSelect filter
+ try:
+ self._dataSelectFilter = seiscomp.system.Environment.Instance() \
+ .absolutePath(self.configGetString('dataSelectFilter'))
+ except Exception:
+ pass
+
+ # output filter debug information
+ try:
+ self._debugFilter = self.configGetBool('debugFilter')
+ except Exception:
+ pass
+
+ # prefix to be used as default for output filenames
+ try:
+ self._fileNamePrefix = self.configGetString('fileNamePrefix')
+ except Exception:
+ pass
+
+ # save request logs in database?
+ try:
+ self._trackdbEnabled = self.configGetBool('trackdb.enable')
+ except Exception:
+ pass
+
+ # default user
+ try:
+ self._trackdbDefaultUser = self.configGetString(
+ 'trackdb.defaultUser')
+ except Exception:
+ pass
+
+ # enable authentication extension?
+ try:
+ self._authEnabled = self.configGetBool('auth.enable')
+ except Exception:
+ pass
+
+ # GnuPG home directory
+ try:
+ self._authGnupgHome = self.configGetString('auth.gnupgHome')
+ except Exception:
+ pass
+ self._authGnupgHome = seiscomp.system.Environment.Instance() \
+ .absolutePath(self._authGnupgHome)
+
+ # blacklist of users/tokens
+ try:
+ strings = self.configGetStrings('auth.blacklist')
+ if len(strings) > 1 or strings[0]:
+ self._authBlacklist = strings
+ except Exception:
+ pass
+
+ # If the database connection is passed via command line or
+ # configuration file then messaging is disabled. Messaging is only used
+ # to get the configured database connection URI.
+ if self.databaseURI() != "":
+ self.setMessagingEnabled(self._trackdbEnabled)
+ else:
+ # Without the event service, a database connection is not
+ # required if the inventory is loaded from file and no data
+ # availability information should be processed
+ if not self._serveEvent and not self._useArclinkAccess and \
+ (not self._serveStation or \
+ (not self.isInventoryDatabaseEnabled() and not self._daEnabled)):
+ self.setMessagingEnabled(self._trackdbEnabled)
+ self.setDatabaseEnabled(False, False)
+
+ return True
+
+ def printUsage(self):
+
+ print('''Usage:
+ fdsnws [options]
+
+Provide FDSN Web Services''')
+
+ seiscomp.client.Application.printUsage(self)
+
+ print('''Examples:
+Execute on command line with debug output
+ fdsnws --debug
+''')
+
+ # -------------------------------------------------------------------------
+ # Signal handling in Python and fork in wrapped C++ code is not a good
+ # combination. Without digging too much into the problem, forking the
+ # process with os.fork() helps
+ def forkProcess(self):
+ cp = os.fork()
+ if cp < 0:
+ return False
+ if cp == 0:
+ return True
+
+ sys.exit(0)
+ return True
+
+ # -------------------------------------------------------------------------
+ def getDACache(self):
+ if not self._daEnabled:
+ return None
+
+ now = seiscomp.core.Time.GMT()
+ # check if cache is still valid
+ if self._daCache is None or now > self._daCache.validUntil():
+
+ if self.query() is None:
+ seiscomp.logging.error('failed to connect to database')
+ return None
+
+ da = seiscomp.datamodel.DataAvailability()
+ self.query().loadDataExtents(da)
+ validUntil = now + seiscomp.core.TimeSpan(self._daCacheDuration, 0)
+ self._daCache = DataAvailabilityCache(self, da, validUntil)
+
+ return self._daCache
+
+ # -------------------------------------------------------------------------
+ @staticmethod
+ def _parseEventTypes(names):
+ types = set()
+ typeMap = {seiscomp.datamodel.EEventTypeNames.name(i): i
+ for i in range(seiscomp.datamodel.EEventTypeQuantity)}
+ for n in names:
+ name = n.lower().strip()
+ if name == "unknown":
+ types.add(-1)
+ else:
+ if name in typeMap:
+ types.add(typeMap[name])
+ else:
+ raise Exception("event type name '%s' not supported"
+ % name)
+
+ return types
+
+ # -------------------------------------------------------------------------
+ @staticmethod
+ def _formatEventTypes(types):
+ return ",".join(["unknown" if i < 0 else
+ seiscomp.datamodel.EEventTypeNames.name(i)
+ for i in sorted(types)])
+
+ # -------------------------------------------------------------------------
+ def _site(self):
+ modeStr = None
+ if self._evaluationMode is not None:
+ modeStr = seiscomp.datamodel.EEvaluationModeNames.name(self._evaluationMode)
+ whitelistStr = ""
+ if self._eventTypeWhitelist is not None:
+ whitelistStr = ", ".join(["unknown" if i < 0 else
+ seiscomp.datamodel.EEventTypeNames.name(i)
+ for i in sorted(self._eventTypeWhitelist)])
+ blacklistStr = ""
+ if self._eventTypeBlacklist is not None:
+ blacklistStr = ", ".join(["unknown" if i < 0 else
+ seiscomp.datamodel.EEventTypeNames.name(i)
+ for i in sorted(self._eventTypeBlacklist)])
+ stationFilterStr = ""
+ if self._stationFilter is not None:
+ stationFilterStr = self._stationFilter
+ dataSelectFilterStr = ""
+ if self._dataSelectFilter is not None:
+ dataSelectFilterStr = self._dataSelectFilter
+ seiscomp.logging.debug("""
+configuration read:
+ serve
+ dataselect : {}
+ event : {}
+ station : {}
+ availability : {}
+ listenAddress : {}
+ port : {}
+ connections : {}
+ htpasswd : {}
+ accessLog : {}
+ CORS origins : {}
+ queryObjects : {}
+ realtimeGap : {}
+ samples (M) : {}
+ recordBulkSize : {}
+ allowRestricted : {}
+ handleConditionalRequests: {}
+ useArclinkAccess : {}
+ hideAuthor : {}
+ hideComments : {}
+ evaluationMode : {}
+ data availability
+ enabled : {}
+ cache duration : {}
+ repo name : {}
+ dcc name : {}
+ eventType
+ whitelist : {}
+ blacklist : {}
+ inventory filter
+ station : {}
+ dataSelect : {}
+ debug enabled : {}
+ trackdb
+ enabled : {}
+ defaultUser : {}
+ auth
+ enabled : {}
+ gnupgHome : {}
+ requestLog : {}""".format( \
+ self._serveDataSelect, self._serveEvent, self._serveStation,
+ self._serveAvailability, self._listenAddress, self._port,
+ self._connections, self._htpasswd, self._accessLogFile,
+ self._corsOrigins, self._queryObjects, self._realtimeGap,
+ self._samplesM, self._recordBulkSize, self._allowRestricted,
+ self._handleConditionalRequests, self._useArclinkAccess,
+ self._hideAuthor, self._hideComments, modeStr, self._daEnabled,
+ self._daCacheDuration, self._daRepositoryName, self._daDCCName,
+ whitelistStr, blacklistStr, stationFilterStr, dataSelectFilterStr,
+ self._debugFilter, self._trackdbEnabled, self._trackdbDefaultUser,
+ self._authEnabled, self._authGnupgHome, self._requestLogFile))
+
+ if not self._serveDataSelect and not self._serveEvent and \
+ not self._serveStation:
+ seiscomp.logging.error("all services disabled through configuration")
+ return None
+
+ # access logger if requested
+ if self._accessLogFile:
+ self._accessLog = Log(self._accessLogFile)
+
+ # load inventory needed by DataSelect and Station service
+ stationInv = dataSelectInv = None
+ if self._serveDataSelect or self._serveStation:
+ retn = False
+ stationInv = dataSelectInv = seiscomp.client.Inventory.Instance().inventory()
+ seiscomp.logging.info("inventory loaded")
+
+ if self._serveDataSelect and self._serveStation:
+ # clone inventory if station and dataSelect filter are distinct
+ # else share inventory between both services
+ if self._stationFilter != self._dataSelectFilter:
+ dataSelectInv = self._cloneInventory(stationInv)
+ retn = self._filterInventory(stationInv, self._stationFilter, "station") and \
+ self._filterInventory(
+ dataSelectInv, self._dataSelectFilter, "dataSelect")
+ else:
+ retn = self._filterInventory(
+ stationInv, self._stationFilter)
+ elif self._serveStation:
+ retn = self._filterInventory(stationInv, self._stationFilter)
+ else:
+ retn = self._filterInventory(
+ dataSelectInv, self._dataSelectFilter)
+
+ self.__timeInventoryLoaded = seiscomp.core.Time.GMT()
+
+ if not retn:
+ return None
+
+ if self._authEnabled:
+ self._access = Access()
+ self._checker = UsernamePasswordChecker(self._userdb)
+ else:
+ self._access = Access() if self._useArclinkAccess else None
+ self._checker = checkers.FilePasswordDB(self._htpasswd, cache=True)
+
+ if self._serveDataSelect and self._useArclinkAccess:
+ self._access.initFromSC3Routing(self.query().loadRouting())
+
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
+
+ shareDir = os.path.join(seiscomp.system.Environment.Instance().shareDir(), 'fdsnws')
+
+ # Overwrite/set mime type of *.wadl and *.xml documents. Instead of
+ # using the official types defined in /etc/mime.types 'application/xml'
+ # is used as enforced by the FDSNWS spec.
+ static.File.contentTypes['.wadl'] = 'application/xml'
+ static.File.contentTypes['.xml'] = 'application/xml'
+
+ # create resource tree /fdsnws/...
+ root = ListingResource()
+
+ fileName = os.path.join(shareDir, 'favicon.ico')
+ fileRes = static.File(fileName, 'image/x-icon')
+ fileRes.childNotFound = NoResource()
+ fileRes.isLeaf = True
+ root.putChild(b'favicon.ico', fileRes)
+
+ prefix = ListingResource()
+ root.putChild(b'fdsnws', prefix)
+
+ # dataselect
+ if self._serveDataSelect:
+ dataselect = ListingResource(DataSelectVersion)
+ prefix.putChild(b'dataselect', dataselect)
+ lstFile = os.path.join(shareDir, 'dataselect.html')
+ dataselect1 = DirectoryResource(lstFile, DataSelectVersion)
+ dataselect.putChild(b'1', dataselect1)
+
+ # query
+ dataselect1.putChild(b'query', FDSNDataSelect(
+ dataSelectInv, self._recordBulkSize))
+
+ # queryauth
+ if self._authEnabled:
+ realm = FDSNDataSelectAuthRealm(
+ dataSelectInv, self._recordBulkSize, self._access, self._userdb)
+ else:
+ realm = FDSNDataSelectRealm(
+ dataSelectInv, self._recordBulkSize, self._access)
+ msg = 'authorization for restricted time series data required'
+ authSession = self._getAuthSessionWrapper(realm, msg)
+ dataselect1.putChild(b'queryauth', authSession)
+
+ # version
+ dataselect1.putChild(b'version', ServiceVersion(DataSelectVersion))
+ fileRes = static.File(os.path.join(shareDir, 'dataselect.wadl'))
+ fileRes.childNotFound = NoResource(DataSelectVersion)
+
+ # application.wadl
+ dataselect1.putChild(b'application.wadl', fileRes)
+
+ # builder
+ fileRes = static.File(os.path.join(
+ shareDir, 'dataselect-builder.html'))
+ fileRes.childNotFound = NoResource(DataSelectVersion)
+ dataselect1.putChild(b'builder', fileRes)
+
+ if self._authEnabled:
+ dataselect1.putChild(b'auth', AuthResource(
+ DataSelectVersion, self._authGnupgHome, self._userdb))
+
+ # event
+ if self._serveEvent:
+ event = ListingResource(EventVersion)
+ prefix.putChild(b'event', event)
+ lstFile = os.path.join(shareDir, 'event.html')
+ event1 = DirectoryResource(lstFile, EventVersion)
+ event.putChild(b'1', event1)
+
+ # query
+ event1.putChild(b'query', FDSNEvent(
+ self._hideAuthor, self._hideComments, self._evaluationMode,
+ self._eventTypeWhitelist, self._eventTypeBlacklist,
+ self._eventFormats))
+
+ # catalogs
+ fileRes = static.File(os.path.join(shareDir, 'catalogs.xml'))
+ fileRes.childNotFound = NoResource(EventVersion)
+ event1.putChild(b'catalogs', fileRes)
+
+ # contributors
+ fileRes = static.File(os.path.join(shareDir, 'contributors.xml'))
+ fileRes.childNotFound = NoResource(EventVersion)
+ event1.putChild(b'contributors', fileRes)
+
+ # version
+ event1.putChild(b'version', ServiceVersion(EventVersion))
+
+ # application.wadl
+ filterList = ['includecomments'] if self._hideComments else []
+ try:
+ fileRes = WADLFilter(os.path.join(shareDir, 'event.wadl'),
+ filterList)
+ except Exception:
+ fileRes = NoResource(StationVersion)
+ event1.putChild(b'application.wadl', fileRes)
+
+ # builder
+ fileRes = static.File(os.path.join(shareDir, 'event-builder.html'))
+ fileRes.childNotFound = NoResource(EventVersion)
+ event1.putChild(b'builder', fileRes)
+
+ # station
+ if self._serveStation:
+ station = ListingResource(StationVersion)
+ prefix.putChild(b'station', station)
+ lstFile = os.path.join(shareDir, 'station.html')
+ station1 = DirectoryResource(lstFile, StationVersion)
+ station.putChild(b'1', station1)
+
+ # query
+ station1.putChild(b'query', FDSNStation(
+ stationInv, self._allowRestricted, self._queryObjects,
+ self._daEnabled, self._handleConditionalRequests,
+ self.__timeInventoryLoaded))
+
+ # version
+ station1.putChild(b'version', ServiceVersion(StationVersion))
+
+ # application.wadl
+ filterList = [] if self._daEnabled else ['matchtimeseries']
+ try:
+ fileRes = WADLFilter(os.path.join(shareDir, 'station.wadl'),
+ filterList)
+ except Exception:
+ fileRes = NoResource(StationVersion)
+ station1.putChild(b'application.wadl', fileRes)
+
+ # builder
+ fileRes = static.File(os.path.join(shareDir, 'station-builder.html'))
+ fileRes.childNotFound = NoResource(StationVersion)
+ station1.putChild(b'builder', fileRes)
+
+ # availability
+ if self._serveAvailability:
+
+ # create a set of waveformIDs which represent open channels
+ if self._serveDataSelect:
+ openStreams = set()
+ for iNet in range(dataSelectInv.networkCount()):
+ net = dataSelectInv.network(iNet)
+ if isRestricted(net):
+ continue
+ for iSta in range(net.stationCount()):
+ sta = net.station(iSta)
+ if isRestricted(sta):
+ continue
+ for iLoc in range(sta.sensorLocationCount()):
+ loc = sta.sensorLocation(iLoc)
+ for iCha in range(loc.streamCount()):
+ cha = loc.stream(iCha)
+ if isRestricted(cha):
+ continue
+ openStreams.add("{0}.{1}.{2}.{3}".format(
+ net.code(), sta.code(), loc.code(), cha.code()))
+ self._openStreams = openStreams
+ else:
+ self._openStreams = None
+
+ availability = ListingResource(AvailabilityVersion)
+ prefix.putChild(b'availability', availability)
+ lstFile = os.path.join(shareDir, 'availability.html')
+ availability1 = DirectoryResource(lstFile, AvailabilityVersion)
+ availability.putChild(b'1', availability1)
+
+ # query
+ availability1.putChild(b'query', FDSNAvailabilityQuery())
+
+ # queryauth
+ if self._authEnabled:
+ realm = FDSNAvailabilityQueryAuthRealm(self._access,
+ self._userdb)
+ else:
+ realm = FDSNAvailabilityQueryRealm(self._access)
+ msg = 'authorization for restricted availability segment data ' \
+ 'required'
+ authSession = self._getAuthSessionWrapper(realm, msg)
+ availability1.putChild(b'queryauth', authSession)
+
+ # extent
+ availability1.putChild(b'extent', FDSNAvailabilityExtent())
+
+ # extentauth
+ if self._authEnabled:
+ realm = FDSNAvailabilityExtentAuthRealm(self._access,
+ self._userdb)
+ else:
+ realm = FDSNAvailabilityExtentRealm(self._access)
+ msg = 'authorization for restricted availability extent data ' \
+ 'required'
+ authSession = self._getAuthSessionWrapper(realm, msg)
+ availability1.putChild(b'extentauth', authSession)
+
+ # version
+ availability1.putChild(
+ b'version', ServiceVersion(AvailabilityVersion))
+
+ # application.wadl
+ fileRes = static.File(os.path.join(shareDir, 'availability.wadl'))
+ fileRes.childNotFound = NoResource(AvailabilityVersion)
+ availability1.putChild(b'application.wadl', fileRes)
+
+ # builder-query
+ fileRes = static.File(os.path.join(
+ shareDir, 'availability-builder-query.html'))
+ fileRes.childNotFound = NoResource(AvailabilityVersion)
+ availability1.putChild(b'builder-query', fileRes)
+
+ # builder-extent
+ fileRes = static.File(os.path.join(
+ shareDir, 'availability-builder-extent.html'))
+ fileRes.childNotFound = NoResource(AvailabilityVersion)
+ availability1.putChild(b'builder-extent', fileRes)
+
+ # static files
+ fileRes = static.File(os.path.join(shareDir, 'js'))
+ fileRes.childNotFound = NoResource()
+ fileRes.hideInListing = True
+ prefix.putChild(b'js', fileRes)
+
+ fileRes = static.File(os.path.join(shareDir, 'css'))
+ fileRes.childNotFound = NoResource()
+ fileRes.hideInListing = True
+ prefix.putChild(b'css', fileRes)
+
+ return Site(root, self._corsOrigins)
+
+ # -------------------------------------------------------------------------
+ def _reloadTask(self):
+ if not self.__reloadRequested:
+ return
+
+ seiscomp.logging.info("reloading inventory")
+ self.reloadInventory()
+
+ site = self._site()
+
+ if site:
+ self.__tcpPort.factory = site
+
+ # remove reload file
+ try:
+ reloadfile = os.path.join(
+ seiscomp.system.Environment.Instance().installDir(),
+ 'var', 'run', '{}.reload'.format(self.name()))
+ if os.path.isfile(reloadfile):
+ os.remove(reloadfile)
+ except Exception as e:
+ seiscomp.logging.warning(
+ "error processing reload file: {}".format(e))
+
+ seiscomp.logging.info("reload successful")
+
+ else:
+ seiscomp.logging.info("reload failed")
+
+ self._userdb.dump()
+ self.__reloadRequested = False
+
+ # -------------------------------------------------------------------------
+ def _sighupHandler(self, signum, frame): #pylint: disable=W0613
+ if self.__reloadRequested:
+ seiscomp.logging.info("SIGHUP received, reload already in progress")
+ else:
+ seiscomp.logging.info("SIGHUP received, reload scheduled")
+ self.__reloadRequested = True
+
+ # -------------------------------------------------------------------------
    def run(self):
        """Application main loop.

        Sets up the optional request logger, applies the user blacklist,
        builds the resource tree, starts the TCP listener, installs the
        SIGHUP handler plus the periodic reload task, and runs the twisted
        reactor.

        Returns True after a clean reactor shutdown, False on error.
        """
        retn = False
        try:
            # request logger if requested
            self._requestLog = None
            if self._requestLogFile:
                # import here, so we don't depend on GeoIP if request log is not
                # needed
                from seiscomp.fdsnws.reqlog import RequestLog  # pylint: disable=C0415
                self._requestLog = RequestLog(self._requestLogFile, self._userSalt)

            # blacklisted users are refused by the user database
            for user in self._authBlacklist:
                self._userdb.blacklistUser(user)

            site = self._site()

            if not site:
                return False

            # start listen for incoming request
            self.__tcpPort = reactor.listenTCP(self._port,
                                               site,
                                               self._connections,
                                               self._listenAddress)

            # setup signal handler
            signal.signal(signal.SIGHUP, self._sighupHandler)
            # poll for requested reloads once per second
            task.LoopingCall(self._reloadTask).start(1, False)

            # start processing
            seiscomp.logging.info("start listening")
            log.addObserver(logSC3)

            reactor.run()
            retn = True
        except Exception as e:
            seiscomp.logging.error(str(e))

        return retn
+
+ # -------------------------------------------------------------------------
+ @staticmethod
+ def _cloneInventory(inv):
+ wasEnabled = seiscomp.datamodel.PublicObject.IsRegistrationEnabled()
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
+ inv2 = seiscomp.datamodel.Inventory.Cast(inv.clone())
+
+ for iNet in range(inv.networkCount()):
+ net = inv.network(iNet)
+ net2 = seiscomp.datamodel.Network.Cast(net.clone())
+ inv2.add(net2)
+
+ for iSta in range(net.stationCount()):
+ sta = net.station(iSta)
+ sta2 = seiscomp.datamodel.Station.Cast(sta.clone())
+ net2.add(sta2)
+
+ for iLoc in range(sta.sensorLocationCount()):
+ loc = sta.sensorLocation(iLoc)
+ loc2 = seiscomp.datamodel.SensorLocation.Cast(loc.clone())
+ sta2.add(loc2)
+
+ for iCha in range(loc.streamCount()):
+ cha = loc.stream(iCha)
+ cha2 = seiscomp.datamodel.Stream.Cast(cha.clone())
+ loc2.add(cha2)
+
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
+ return inv2
+
+ # -------------------------------------------------------------------------
    def _filterInventory(self, inv, fileName, serviceName=""):
        """Filter *inv* in place according to the INI file *fileName*.

        Each INI section defines one rule: a mandatory channel 'code'
        pattern (fnmatch syntax, matched against NET.STA.LOC.CHA) and
        optional 'restricted', 'shared', 'netClass' and 'archive'
        constraints. A section name starting with '!' marks an exclude
        rule. For every stream the first rule whose constraints all match
        decides keep/remove; streams matching no rule are removed only if
        at least one include rule exists. Empty locations, stations and
        networks are pruned afterwards.

        Returns True on success (also when no filter file or no sections
        are present), False on configuration errors.
        """
        if not fileName:
            return True

        class FilterRule:
            # one parsed INI section; optional attributes stay None when
            # not specified
            def __init__(self, name, code):
                self.name = name
                self.exclude = name.startswith("!")  # '!' prefix: exclude rule
                self.code = code

                self.restricted = None
                self.shared = None
                self.netClass = None
                self.archive = None

        # read filter configuration from INI file
        invFilter = []
        includeRuleDefined = False
        try:
            # pylint: disable=C0415
            if sys.version_info[0] < 3:
                from ConfigParser import ConfigParser
                from ConfigParser import Error as CPError
            else:
                from configparser import ConfigParser
                from configparser import Error as CPError
        except ImportError:
            seiscomp.logging.error("could not load 'ConfigParser' Python module")
            return False

        cp = ConfigParser()

        try:
            seiscomp.logging.notice("reading inventory filter file: %s" % fileName)
            fp = open(fileName, 'r')
            if sys.version_info < (3, 2):
                cp.readfp(fp)  # pylint: disable=W1505
            else:
                cp.read_file(fp, fileName)

            # an empty filter file keeps the full inventory
            if len(cp.sections()) == 0:
                return True

            # check for mandatory code attribute
            for sectionName in cp.sections():
                code = ""
                try:
                    code = cp.get(sectionName, "code")
                except CPError:
                    seiscomp.logging.error(
                        "missing 'code' attribute in section {} of inventory "
                        "filter file {}".format(sectionName, fileName))
                    return False

                rule = FilterRule(sectionName, str(code))

                # optional attributes: absence simply leaves them None
                try:
                    rule.restricted = cp.getboolean(sectionName, 'restricted')
                except CPError:
                    pass

                try:
                    rule.shared = cp.getboolean(sectionName, 'shared')
                except CPError:
                    pass

                try:
                    rule.netClass = str(cp.get(sectionName, 'netClass'))
                except CPError:
                    pass

                try:
                    rule.archive = str(cp.get(sectionName, 'archive'))
                except CPError:
                    pass

                includeRuleDefined |= not rule.exclude
                invFilter.append(rule)

        except Exception as e:
            seiscomp.logging.error(
                "could not read inventory filter file %s: %s" % (fileName, str(e)))
            return False

        # apply filter
        # networks
        if self._debugFilter:
            debugLines = []
        delNet = delSta = delLoc = delCha = 0
        iNet = 0
        while iNet < inv.networkCount():
            net = inv.network(iNet)

            # restricted/shared may be unset on any level; ValueError from
            # the getter means "not specified" and is recorded as None
            try:
                netRestricted = net.restricted()
            except ValueError:
                netRestricted = None
            try:
                netShared = net.shared()
            except ValueError:
                netShared = None

            # stations
            iSta = 0
            while iSta < net.stationCount():
                sta = net.station(iSta)
                staCode = "%s.%s" % (net.code(), sta.code())

                try:
                    staRestricted = sta.restricted()
                except ValueError:
                    staRestricted = None
                try:
                    staShared = sta.shared()
                except ValueError:
                    staShared = None

                # sensor locations
                iLoc = 0
                while iLoc < sta.sensorLocationCount():
                    loc = sta.sensorLocation(iLoc)
                    locCode = "%s.%s" % (staCode, loc.code())

                    # channels
                    iCha = 0
                    while iCha < loc.streamCount():
                        cha = loc.stream(iCha)
                        code = "%s.%s" % (locCode, cha.code())

                        # evaluate rules until matching code is found
                        match = False
                        for rule in invFilter:
                            # code
                            if not fnmatch.fnmatchcase(code, rule.code):
                                continue

                            # restricted: channel value wins, then station,
                            # then network
                            if rule.restricted is not None:
                                try:
                                    if cha.restricted() != rule.restricted:
                                        continue
                                except ValueError:
                                    if staRestricted is not None:
                                        if staRestricted != rule.restricted:
                                            continue
                                    elif netRestricted is None or \
                                            netRestricted != rule.restricted:
                                        continue

                            # shared: same channel -> station -> network
                            # fallback
                            if rule.shared is not None:
                                try:
                                    if cha.shared() != rule.shared:
                                        continue
                                except ValueError:
                                    if staShared is not None:
                                        if staShared != rule.shared:
                                            continue
                                    elif netShared is None or \
                                            netShared != rule.shared:
                                        continue

                            # netClass
                            if rule.netClass is not None and \
                                    net.netClass() != rule.netClass:
                                continue

                            # archive
                            if rule.archive is not None and \
                                    net.archive() != rule.archive:
                                continue

                            # the rule matched
                            match = True
                            break

                        # remove on matching exclude rule, or on no match
                        # at all when at least one include rule exists
                        if (match and rule.exclude) or \
                                (not match and includeRuleDefined):
                            loc.removeStream(iCha)
                            delCha += 1
                            reason = "no matching include rule"
                            if match:
                                reason = "'%s'" % rule.name
                            if self._debugFilter:
                                debugLines.append(
                                    "%s [-]: %s" % (code, reason))
                        else:
                            iCha += 1
                            reason = "no matching exclude rule"
                            if match:
                                reason = "'%s'" % rule.name
                            if self._debugFilter:
                                debugLines.append(
                                    "%s [+]: %s" % (code, reason))

                    # remove empty sensor locations
                    if loc.streamCount() == 0:
                        sta.removeSensorLocation(iLoc)
                        delLoc += 1
                    else:
                        iLoc += 1

                # remove empty stations
                if sta.sensorLocationCount() == 0:
                    delSta += 1
                    net.removeStation(iSta)
                else:
                    iSta += 1

            # remove empty networks
            if net.stationCount() == 0:
                delNet += 1
                inv.removeNetwork(iNet)
            else:
                iNet += 1

        if serviceName:
            serviceName += ": "
        seiscomp.logging.debug(
            "%sremoved %i networks, %i stations, %i locations, %i streams" % (
                serviceName, delNet, delSta, delLoc, delCha))
        if self._debugFilter:
            debugLines.sort()
            seiscomp.logging.notice("%sfilter decisions based on file %s:\n%s" % (
                serviceName, fileName, str("\n".join(debugLines))))

        return True
+
+ # -------------------------------------------------------------------------
    def _getAuthSessionWrapper(self, realm, msg):
        """Wrap *realm* in HTTP digest authentication using *msg* as the
        authentication realm string.

        NOTE(review): a guard.DigestCredentialFactory is created and its
        ``digest`` attribute is immediately replaced with a
        credentials.DigestCredentialFactory built from the byte-string
        realm -- presumably a bytes/str workaround for twisted under
        Python 3; confirm against the twisted version in use.
        """
        p = portal.Portal(realm, [self._checker])
        f = guard.DigestCredentialFactory('MD5', msg)
        f.digest = credentials.DigestCredentialFactory('MD5', py3bstr(msg))
        return HTTPAuthSessionWrapper(p, [f])
+
+
# instantiate and run the FDSNWS application; its return value becomes the
# process exit code
fdsnwsApp = FDSNWS(len(sys.argv), sys.argv)
sys.exit(fdsnwsApp())
+
+
+# vim: ts=4 et tw=79
diff --git a/bin/fdsnxml2inv b/bin/fdsnxml2inv
new file mode 100755
index 0000000..3aecdc4
Binary files /dev/null and b/bin/fdsnxml2inv differ
diff --git a/bin/import_inv b/bin/import_inv
new file mode 100755
index 0000000..3b963d7
--- /dev/null
+++ b/bin/import_inv
@@ -0,0 +1,134 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import sys
+import os
+import subprocess
+import glob
+import seiscomp.client
+
+
class Importer(seiscomp.client.Application):
    """Command line tool importing inventory in various formats.

    The actual conversion is delegated to a ``<format>2inv`` executable
    located in $SEISCOMP_ROOT/bin; output defaults to
    $SEISCOMP_ROOT/etc/inventory/<input>.xml.
    """

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        # standalone converter: no messaging, no database
        self.setMessagingEnabled(False)
        self.setDatabaseEnabled(False, False)

        self._args = argv[1:]

    def run(self):
        """Parse the command line and run the converter.

        Returns True on success, False on any usage or conversion error.
        """
        if len(self._args) == 0:
            sys.stderr.write(
                "Usage: import_inv [{format}|help] [output]\n")
            return False

        if self._args[0] == "help":
            if len(self._args) < 2:
                sys.stderr.write("'help' can only be used with 'formats'\n")
                sys.stderr.write("import_inv help formats\n")
                return False

            if self._args[1] == "formats":
                return self.printFormats()

            sys.stderr.write("unknown topic '%s'\n" % self._args[1])
            return False

        fmt = self._args[0]
        try:
            prog = os.path.join(
                os.environ['SEISCOMP_ROOT'], "bin", "%s2inv" % fmt)
        except KeyError:
            # narrowed from a bare except: only the env lookup can fail here
            sys.stderr.write(
                "Could not get SeisComP root path, SEISCOMP_ROOT not set?\n")
            return False

        if not os.path.exists(prog):
            sys.stderr.write("Format '%s' is not supported\n" % fmt)
            return False

        if len(self._args) < 2:
            sys.stderr.write("Input missing\n")
            return False

        # renamed from 'input' to avoid shadowing the builtin
        inputFile = self._args[1]

        if len(self._args) < 3:
            filename = os.path.basename(os.path.abspath(inputFile))
            if not filename:
                filename = fmt

            # Append .xml if the ending is not already .xml
            if filename[-4:] != ".xml":
                filename = filename + ".xml"
            storage_dir = os.path.join(
                os.environ['SEISCOMP_ROOT'], "etc", "inventory")
            output = os.path.join(storage_dir, filename)
            try:
                os.makedirs(storage_dir)
            except OSError:
                # directory already exists (or is not creatable; the
                # converter will report the real error)
                pass
            sys.stderr.write("Generating output to %s\n" % output)
        else:
            output = self._args[2]

        proc = subprocess.Popen([prog, inputFile, output],
                                stdout=None, stderr=None, shell=False)
        # wait for the converter to finish; its output goes straight to
        # our stdout/stderr
        proc.communicate()
        if proc.returncode != 0:
            sys.stderr.write(
                "Conversion failed, return code: %d\n" % proc.returncode)
            return False

        return True

    def printFormats(self):
        """List supported formats derived from the *2inv executables."""
        try:
            path = os.path.join(os.environ['SEISCOMP_ROOT'], "bin", "*2inv")
        except KeyError:
            sys.stderr.write(
                "Could not get SeisComP root path, SEISCOMP_ROOT not set?\n")
            return False

        files = glob.glob(path)
        for f in files:
            prog = os.path.basename(f)
            prog = prog[:prog.find("2inv")]
            sys.stdout.write("%s\n" % prog)

        return True

    def printUsage(self):
        """Print usage text and examples."""
        print('''Usage:
  import_inv [FORMAT] input [output]
  import_inv help [topic]

Import inventory information from various sources.''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
List all supported inventory formats
  import_inv help formats

Convert from FDSN stationXML to SeisComp format
  import_inv fdsnxml inventory_fdsnws.xml inventory_sc.xml
''')
+
if __name__ == "__main__":
    # run the application and exit with its return code
    sys.exit(Importer(len(sys.argv), sys.argv)())
diff --git a/bin/instdb2db2 b/bin/instdb2db2
new file mode 100755
index 0000000..51bde2e
--- /dev/null
+++ b/bin/instdb2db2
@@ -0,0 +1,278 @@
+#!/usr/bin/env seiscomp-python
+
+from __future__ import print_function
+import sys, os
+import csv
+from optparse import OptionParser
+
def quote(instr):
    """Return *instr* wrapped in double quotes."""
    return '"{}"'.format(instr)
+
class base(object):
    """Attribute table loaded from a CSV file.

    Maps the mandatory ``id`` column to a dict of the remaining non-empty
    columns; ``low_freq``/``high_freq`` are converted to float when
    present.
    """

    def __init__(self, filename, fields):
        """Read *filename*; *fields* lists the expected column names.

        Raises Exception on a missing column or unreadable values.
        """
        # id -> {column: value} with empty columns dropped
        self.att = {}
        with open(filename) as fd:
            try:
                for row in csv.DictReader(fd):
                    rowId = row['id']
                    if rowId in self.att:
                        print("multiple %s found in %s" % (rowId, filename))
                        continue

                    # drop empty columns (KeyError here means the column
                    # is missing from the CSV header)
                    for key in fields:
                        if not row[key]:
                            del row[key]

                    del row['id']

                    try:
                        row['low_freq'] = float(row['low_freq'])
                    except KeyError:
                        pass

                    try:
                        row['high_freq'] = float(row['high_freq'])
                    except KeyError:
                        pass

                    self.att[rowId] = row

            except KeyError as e:
                raise Exception("column %s missing in %s" % (str(e), filename))

            except (TypeError, ValueError) as e:
                raise Exception("error reading %s: %s" % (filename, str(e)))

    def keys(self):
        """Return the list of known ids."""
        return list(self.att.keys())

    def screname(self, what):
        """Convert a snake_case column name to the CamelCase SC name."""
        nc = ""
        upperNext = True
        for c in what:
            if c == '_':
                upperNext = True
                continue
            if upperNext:
                nc += c.upper()
                upperNext = False
            else:
                nc += c

        # special spellings used by the SC attribute names
        if nc == 'LowFreq':
            nc = 'LowFrequency'
        if nc == 'HighFreq':
            nc = 'HighFrequency'

        return nc

    def reorder(self):
        """Pivot att into {attributeName: {value: [ids]}}; None if empty."""
        if not self.att:
            return None

        att = {}
        for (code, row) in self.att.items():
            for (k, v) in row.items():
                k = self.screname(k)
                # setdefault replaces the original bare try/except pattern
                dk = att.setdefault(k, {})
                dv = dk.setdefault(str(v), [])
                dv.append(code)
        return att

    def dump(self, fdo):
        """Write 'Ia:' attribute lines to *fdo*, grouped by attribute."""
        att = self.reorder()
        if att is None:
            # nothing to dump (original code would crash on None.items())
            return
        lastK = None

        for (k, v) in att.items():
            if not lastK:
                lastK = k
            if lastK != k:
                fdo.write("\n")
            for (kv, ids) in v.items():
                fdo.write("Ia: %s=%s" % (k, quote(kv)))
                for entry in ids:
                    fdo.write(" %s" % entry)
                fdo.write("\n")
            fdo.write("\n")
+
class sensorAttributes(base):
    """Sensor attribute table (sensor_attr.csv)."""

    def __init__(self, filename):
        fields = ['id', 'type', 'unit', 'low_freq', 'high_freq',
                  'model', 'manufacturer', 'remark']
        base.__init__(self, filename, fields)
+
class dataloggerAttributes(base):
    """Datalogger attribute table (datalogger_attr.csv)."""

    def __init__(self, filename):
        fields = ['id', 'digitizer_model', 'digitizer_manufacturer',
                  'recorder_model', 'recorder_manufacturer',
                  'clock_model', 'clock_manufacturer', 'clock_type',
                  'remark']
        base.__init__(self, filename, fields)
+
class INST(object):
    """Old tab instrument file with per-line type classification.

    Lines are parsed as "<id> > <content>" and classified by _filltypes()
    ('S' sensor, 'D' datalogger, 'L' gain, 'F' FIR, 'P' digital paz,
    'I' IIR paz, 'C' comment, 'U' unknown); dump() re-emits them in the
    new tab format, inserting attribute blocks from the sensor and
    datalogger tables.
    """

    def cleanID(self, id):
        """Return the part of *id* following the last underscore."""
        nc = ""
        for c in id:
            nc += c
            if c == '_':
                # restart accumulation after each underscore
                nc = ""

        return nc

    def __init__(self, filename, attS, attD):
        """Load *filename*; attS/attD are the attribute CSV paths."""
        self.filename = filename
        self.sensorA = sensorAttributes(attS)
        self.dataloggerA = dataloggerAttributes(attD)
        lines = []
        f = open(filename)
        for line in f:
            line = line.strip()
            if not line or line[0] == '#':
                # Add comments line types
                lines.append({'content': line, 'type': 'C', 'id': None})
            else:
                (id, line) = line.split(">", 1)
                id = id.strip()
                line = line.strip()
                # Add undefined line types
                lines.append({'content': line, 'type': 'U', 'id': id})
        f.close()
        self.lines = lines
        self._filltypes()

    def _filltypes(self):
        """Classify all 'U' lines; print ids that remain unclassified."""
        # first pass: classify by id substring patterns
        for line in self.lines:
            if line['type'] != 'U':
                continue
            id = line['id']
            if id.find('_FIR_') != -1:
                line['type'] = 'F'
            elif id.find('Sngl-gain_') != -1:
                line['type'] = 'L'
                line['id'] = self.cleanID(id)
            elif id.find('_digipaz_') != -1:
                line['type'] = 'P'
            elif id.find('_iirpaz_') != -1:
                line['type'] = 'I'

        # second pass: classify by attribute table membership, then by a
        # hard-coded list of known datalogger/sensor names
        for line in self.lines:
            if line['type'] != 'U':
                continue
            id = self.cleanID(line['id'])

            if id in list(self.sensorA.keys()):
                line['type'] = 'S'
                line['id'] = id
            elif id in list(self.dataloggerA.keys()):
                line['type'] = 'D'
                line['id'] = id
            # Those we are forcing !
            elif id in ['OSIRIS-SC', 'Gaia', 'LE24', 'MALI', 'PSS', 'FDL', 'CMG-SAM', 'CMG-DCM', 'EDAS-24', 'SANIAC']:
                line['id'] = id
                line['type'] = 'D'
            elif id in ['Trillium-Compact', 'Reftek-151/120', 'BBVS-60', 'CMG-3ESP/60F', 'LE-1D/1', 'L4-3D/BW', 'S13', 'GS13', 'SH-1', 'MP', 'MARKL22', 'CM-3', 'CMG-6T', 'SM-6/BW']:
                line['id'] = id
                line['type'] = 'S'

        # report anything that could not be classified
        for line in self.lines:
            if line['type'] == 'U':
                print("'"+self.cleanID(line['id'])+"', ", end=' ')

    def dump(self, fdo):
        """Write the converted file to *fdo* in new tab format.

        Attribute blocks ('Ia:' lines) are emitted once, just before the
        first sensor ('Se:') and first datalogger ('Dl:') line.
        """
        sa = False  # sensor attribute block already written?
        da = False  # datalogger attribute block already written?

        # compute column widths so ids line up
        dataloggerFieldSize = 0
        sensorFieldSize = 0
        for line in self.lines:
            if line['type'] == 'C':
                continue
            if line['type'] == 'S':
                if len(line['id']) > sensorFieldSize:
                    sensorFieldSize = len(line['id'])
            if line['type'] == 'D':
                if len(line['id']) > dataloggerFieldSize:
                    dataloggerFieldSize = len(line['id'])

        # negative width yields a left-justified id column
        seLine = "Se: %%%ss %%s\n" % (-1*(sensorFieldSize+1))
        dtLine = "Dl: %%%ss %%s\n" % (-1*(dataloggerFieldSize+1))
        for line in self.lines:
            if line['type'] == 'C':
                fdo.write(line['content'] + "\n")
                continue

            if line['type'] == 'S':
                if not sa:
                    self.sensorA.dump(fdo)
                    sa = True
                fdo.write(seLine % (line['id'], line['content']))
                continue

            if line['type'] == 'D':
                if not da:
                    self.dataloggerA.dump(fdo)
                    da = True
                fdo.write(dtLine % (line['id'], line['content']))
                continue

            if line['type'] == 'L':
                fdo.write("Cl: %s %s\n" % (line['id'], line['content']))
                continue

            if line['type'] == 'F':
                fdo.write("Ff: %s %s\n" % (line['id'], line['content']))
                continue

            if line['type'] == 'P':
                fdo.write("Pz: %s %s\n" % (line['id'], line['content']))
                continue

            if line['type'] == 'I':
                fdo.write("If: %s %s\n" % (line['id'], line['content']))
                continue
+
def main():
    """Convert an old tab file to the new tab format.

    Usage: instdb2db2 [options] input [output]; writes to stdout when no
    output file is given. Returns 1 on command line errors, 0 otherwise.
    """
    parser = OptionParser(usage="Old tab to New tab converter",
                          version="1.0", add_help_option=True)

    parser.add_option("", "--sat", type="string",
                      help="Indicates the sensor attribute file to use",
                      dest="sat", default="sensor_attr.csv")
    parser.add_option("", "--dat", type="string",
                      help="Indicates the station attribute file to use",
                      dest="dat", default="datalogger_attr.csv")
    parser.add_option("-c", "--clean", action="store_true",
                      help="Remove the comments and blank lines",
                      dest="cleanFile", default=False)

    # Parsing & Error check
    (options, args) = parser.parse_args()
    errors = []

    # accept "input" or "input output"; the original 'len(args) != 1'
    # check made the documented two-argument form unreachable
    if len(args) not in (1, 2):
        errors.append("need an Input filename")

    if not os.path.isfile(options.sat):
        errors.append("sensor attribute file '%s' not found." % options.sat)

    if not os.path.isfile(options.dat):
        errors.append("datalogger attribute file '%s' not found." % options.dat)

    if len(args) == 2 and os.path.isfile(args[1]):
        errors.append("output file already exists, will not overwrite.")

    if errors:
        print("Found error while processing the command line:", file=sys.stderr)
        for error in errors:
            print(" %s" % error, file=sys.stderr)
        return 1

    inst = INST(args[0], options.sat, options.dat)
    if len(args) < 2:
        # write to stdout without closing it (the original closed
        # sys.stdout here)
        inst.dump(sys.stdout)
    else:
        with open(args[1], "w") as fdo:
            inst.dump(fdo)
    return 0
+
if __name__ == "__main__":
    # propagate main()'s status as the process exit code (previously a
    # non-zero return from main() was discarded and the script exited 0)
    sys.exit(main())
diff --git a/bin/inv2dlsv b/bin/inv2dlsv
new file mode 100755
index 0000000..343907e
--- /dev/null
+++ b/bin/inv2dlsv
@@ -0,0 +1,98 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+from __future__ import (absolute_import, division, print_function, unicode_literals)
+
+import sys
+import io
+from seiscomp.legacy.fseed import *
+from seiscomp.legacy.db.seiscomp3 import sc3wrap
+from seiscomp.legacy.db.seiscomp3.inventory import Inventory
+import seiscomp.datamodel, seiscomp.io
+
+ORGANIZATION = "EIDA"
+
+
def iterinv(obj):
    """Yield the second-level values of a two-level dict-of-dicts."""
    for inner in obj.values():
        for item in inner.values():
            yield item
+
+
def main():
    """Convert SC3 inventory XML to a dataless SEED volume.

    Reads from *in_xml* (stdin if missing or "-") and writes to
    *out_dataless* (stdout if missing or "-"). Returns 0 on success,
    1 on a usage error; raises IOError/TypeError on bad input.
    """
    # len(sys.argv) is always >= 1, so only the upper bound is meaningful
    # (the original 'len(sys.argv) < 1' test was dead code)
    if len(sys.argv) > 3:
        sys.stderr.write("Usage inv2dlsv [in_xml [out_dataless]]\n")
        return 1

    inFile = sys.argv[1] if len(sys.argv) > 1 else "-"
    out = sys.argv[2] if len(sys.argv) > 2 else ""

    sc3wrap.dbQuery = None

    ar = seiscomp.io.XMLArchive()
    if not ar.open(inFile):
        raise IOError(inFile + ": unable to open")

    obj = ar.readObject()
    if obj is None:
        raise TypeError(inFile + ": invalid format")

    sc3inv = seiscomp.datamodel.Inventory.Cast(obj)
    if sc3inv is None:
        raise TypeError(inFile + ": invalid format")

    inv = Inventory(sc3inv)
    inv.load_stations("*", "*", "*", "*")
    inv.load_instruments()

    vol = SEEDVolume(inv, ORGANIZATION, "", resp_dict=False)

    # add every stream; report but skip channels the volume rejects
    for net in iterinv(inv.network):
        for sta in iterinv(net.station):
            for loc in iterinv(sta.sensorLocation):
                for strm in iterinv(loc.stream):
                    try:
                        vol.add_chan(net.code, sta.code, loc.code,
                                     strm.code, strm.start, strm.end)
                    except SEEDError as e:
                        sys.stderr.write("Error (%s,%s,%s,%s): %s\n" % (
                            net.code, sta.code, loc.code, strm.code, str(e)))

    if not out or out == "-":
        # binary SEED must go to the raw stdout buffer under Python 3
        output = io.BytesIO()
        vol.output(output)
        stdout = sys.stdout.buffer if hasattr(sys.stdout, "buffer") else sys.stdout
        stdout.write(output.getvalue())
        stdout.flush()
        output.close()
    else:
        # use the already-parsed 'out' instead of re-reading sys.argv[2]
        with open(out, "wb") as fd:
            vol.output(fd)

    return 0
+
+
if __name__ == "__main__":
    try:
        sys.exit(main())
    except Exception as e:
        # terminate the message with a newline (was missing, leaving the
        # next shell prompt glued to the error text)
        sys.stderr.write("Error: %s\n" % str(e))
        sys.exit(1)
diff --git a/bin/invextr b/bin/invextr
new file mode 100755
index 0000000..dae9a07
Binary files /dev/null and b/bin/invextr differ
diff --git a/bin/load_timetable b/bin/load_timetable
new file mode 100755
index 0000000..a4800a7
Binary files /dev/null and b/bin/load_timetable differ
diff --git a/bin/msrtsimul b/bin/msrtsimul
new file mode 100755
index 0000000..f1bcaff
--- /dev/null
+++ b/bin/msrtsimul
@@ -0,0 +1,280 @@
+#!/usr/bin/env seiscomp-python
+
+from __future__ import absolute_import, division, print_function
+
+import sys
+import os
+import time
+import datetime
+import calendar
+import stat
+
+from getopt import getopt, GetoptError
+from seiscomp import mseedlite as mseed
+
+
+#------------------------------------------------------------------------------
def read_mseed_with_delays(delaydict, reciterable):
    """
    Create an iterator which takes into account configurable realistic delays.

    This function creates an iterator which returns one miniseed record at a time.
    Artificial delays can be introduced by using delaydict.

    This function can be used to make simulations in real time more realistic
    when e.g. some stations have a much higher delay than others due to
    narrow bandwidth communication channels etc.

    A delaydict has the following data structure:
    keys: XX.ABC (XX: network code, ABC: station code). The key "default" is
    a special value for the default delay.
    values: Delay to be introduced in seconds

    This function will rearrange the iterable object which has been used as
    input for rt_simul() so that it can again be used by rt_simul but taking
    artificial delays into account.

    Yields (delay_time, record) tuples ordered by delay_time.
    """
    import heapq #pylint: disable=C0415

    heap = []
    # Records are released once the input stream has advanced past their
    # (delayed) release time; min_delay of 0 releases as early as possible.
    min_delay = 0
    default_delay = 0
    if 'default' in delaydict:
        default_delay = delaydict['default']
    for rec in reciterable:
        # UTC epoch seconds of the record's last sample.
        rec_time = calendar.timegm(rec.end_time.timetuple())
        delay_time = rec_time
        stationname = "%s.%s" % (rec.net, rec.sta)
        # Per-station delay wins over the "default" entry.
        if stationname in delaydict:
            delay_time = rec_time + delaydict[stationname]
        else:
            delay_time = rec_time + default_delay
        # The heap keeps pending records ordered by artificial release time.
        heapq.heappush(heap, (delay_time, rec))
        toprectime = heap[0][0]
        # Emit the earliest pending record once the current stream time has
        # passed its release time.
        if toprectime - min_delay < rec_time:
            topelement = heapq.heappop(heap)
            yield topelement
    # Input exhausted: flush everything still queued, in release order.
    while heap:
        topelement = heapq.heappop(heap)
        yield topelement
+
+
+#------------------------------------------------------------------------------
def rt_simul(f, speed=1., jump=0., delaydict=None):
    """
    Iterator to simulate "real-time" MSeed input

    At startup, the first MSeed record is read. The following records are
    read in pseudo-real-time relative to the time of the first record,
    resulting in data flowing at realistic speed. This is useful e.g. for
    demonstrating real-time processing using real data of past events.

    The data in the input file may be multiplexed, but *must* be sorted by
    time, e.g. using 'mssort'.

    f           -- file-like object with the raw MiniSEED data
    speed       -- playback speed factor (1.0 = real time)
    jump        -- minutes of data to skip at the beginning
    delaydict   -- optional per-station delays, see read_mseed_with_delays()
    """
    rtime = time.time()          # wall-clock start of the playback
    etime = None                 # data time of the first (or first kept) record
    skipping = True
    record_iterable = mseed.Input(f)
    if delaydict:
        # Wrapped iterator yields (delay_time, record) tuples instead of
        # bare records.
        record_iterable = read_mseed_with_delays(delaydict, record_iterable)
    for rec in record_iterable:
        if delaydict:
            rec_time = rec[0]
            rec = rec[1]
        else:
            rec_time = calendar.timegm(rec.end_time.timetuple())
        if etime is None:
            etime = rec_time

        # Skip the first `jump` minutes of data, then anchor etime on the
        # first record that is actually played back.
        if skipping:
            if (rec_time - etime) / 60.0 < jump:
                continue

            etime = rec_time
            skipping = False

        # tmax is how far the simulated clock has advanced; sleep whenever
        # the record's last sample lies ahead of it.
        tmax = etime + speed * (time.time() - rtime)
        ms = 1000000.0 * (rec.nsamp / rec.fsamp)
        last_sample_time = rec.begin_time + datetime.timedelta(microseconds=ms)
        last_sample_time = calendar.timegm(last_sample_time.timetuple())
        if last_sample_time > tmax:
            time.sleep((last_sample_time - tmax + 0.001) / speed)
        yield rec
+
+
+#------------------------------------------------------------------------------
def usage():
    """Print the command-line help text of msrtsimul to stdout."""
    help_text = '''Usage:
  msrtsimul [options] file

MiniSEED real time playback and simulation

msrtsimul reads sorted (and possibly multiplexed) MiniSEED files and writes
individual records in pseudo-real-time. This is useful e.g. for testing and
simulating data acquisition. Output is
$SEISCOMP_ROOT/var/run/seedlink/mseedfifo unless --seedlink or -c is used.


Options:
  -c, --stdout write on standard output
  -d, --delays add artificial delays
  -s, --speed speed factor (float)
  -j, --jump minutes to skip (float)
  --test test mode
  -m --mode choose between 'realtime' and 'historic'
  --seedlink choose the seedlink module name. Useful if a seedlink
  alias or non-standard names are used. Replaces 'seedlink'
  in the standard mseedfifo path.
  -v, --verbose verbose mode
  -h, --help display this help message

Examples:
Play back miniSEED waveforms in real time with verbose output
  msrtsimul -v miniSEED-file
'''
    print(help_text)
+
+
+#------------------------------------------------------------------------------
def main():
    """Entry point: parse options and replay a MiniSEED file in pseudo real time.

    Records are written either to stdout (-c/--stdout) or to the seedlink
    mseedfifo pipe below $SEISCOMP_ROOT.  Returns a process exit code
    (0 on success, 1 on error).
    """
    py2 = sys.version_info < (3,)

    # MiniSEED is binary data: on Python 3 read/write through the raw buffer.
    ifile = sys.stdin if py2 else sys.stdin.buffer
    verbosity = 0
    speed = 1.
    jump = 0.
    test = False
    seedlink = 'seedlink'
    mode = 'realtime'

    try:
        opts, args = getopt(sys.argv[1:], "cd:s:j:vhm:",
                            ["stdout", "delays=", "speed=", "jump=", "test",
                             "verbose", "help", "mode=", "seedlink="])
    except GetoptError:
        usage()
        return 1

    out_channel = None
    delays = None

    for flag, arg in opts:
        if flag in ("-c", "--stdout"):
            out_channel = sys.stdout if py2 else sys.stdout.buffer
        elif flag in ("-d", "--delays"):
            delays = arg
        elif flag in ("-s", "--speed"):
            speed = float(arg)
        elif flag in ("-j", "--jump"):
            jump = float(arg)
        elif flag in ("-m", "--mode"):
            mode = arg
        elif flag == "--seedlink":
            seedlink = arg
        elif flag in ("-v", "--verbose"):
            verbosity += 1
        elif flag == "--test":
            test = True
        else:
            usage()
            if flag in ("-h", "--help"):
                return 0
            return 1

    if len(args) == 1:
        if args[0] != "-":
            try:
                ifile = open(args[0], "rb")
            except IOError as e:
                print("could not open input file '{}' for reading: {}" \
                      .format(args[0], e), file=sys.stderr)
                # Consistency fix: return the exit code instead of calling
                # sys.exit() inside main().
                return 1
    elif len(args) != 0:
        usage()
        return 1

    if out_channel is None:
        try:
            sc_root = os.environ["SEISCOMP_ROOT"]
        except KeyError:
            print("SEISCOMP_ROOT environment variable is not set", file=sys.stderr)
            return 1

        mseed_fifo = os.path.join(sc_root, "var", "run", seedlink, "mseedfifo")
        if verbosity:
            print("output data to %s" % mseed_fifo, file=sys.stderr)

        if not os.path.exists(mseed_fifo):
            print("""\
ERROR: {} does not exist.
In order to push the records to SeedLink, it needs to run and must be configured for real-time playback.
""".format(mseed_fifo), file=sys.stderr)
            return 1

        if not stat.S_ISFIFO(os.stat(mseed_fifo).st_mode):
            print("""\
ERROR: {} is not a named pipe
Check if SeedLink is running and configured for real-time playback.
""".format(mseed_fifo), file=sys.stderr)
            return 1

        try:
            out_channel = open(mseed_fifo, "wb")
        except Exception as e:
            print(str(e), file=sys.stderr)
            return 1

    try:
        delaydict = None
        if delays:
            # Parse the delay file: one "NET.STA: seconds" entry per line.
            delaydict = dict()
            try:
                # Fix: open the delay file in a context manager so it is
                # closed even when a line cannot be parsed.
                with open(delays, 'r') as f:
                    for line in f:
                        content = line.split(':')
                        if len(content) != 2:
                            raise Exception("Could not parse a line in file %s: %s\n" % (delays, line))
                        delaydict[content[0].strip()] = float(content[1].strip())
            except Exception as e:
                # Best effort: a broken delay file disables delays but does
                # not abort the playback.
                print("Error reading delay file {}: {}".format(delays, e),
                      file=sys.stderr)

        inp = rt_simul(ifile, speed=speed, jump=jump, delaydict=delaydict)
        stime = time.time()

        time_diff = None
        print("Starting msrtsimul at {}".format(datetime.datetime.utcnow()), file=sys.stderr)
        for rec in inp:
            # The mseedfifo only accepts 512-byte records.
            if rec.size != 512:
                print("Skipping record of {}.{}.{}.{} starting on {}: length != 512 Bytes: ".format(rec.net, rec.sta, rec.loc, rec.cha, str(rec.begin_time)), file=sys.stderr)
                continue
            if time_diff is None:
                ms = 1000000.0 * (rec.nsamp / rec.fsamp)
                time_diff = datetime.datetime.utcnow() - rec.begin_time - \
                    datetime.timedelta(microseconds=ms)
            if mode == 'realtime':
                # Shift historic record times so the playback appears live.
                rec.begin_time += time_diff

            if verbosity:
                print("%s_%s %7.2f %s %7.2f" % \
                      (rec.net, rec.sta, (time.time() - stime), str(rec.begin_time),
                       time.time() - calendar.timegm(rec.begin_time.timetuple())),
                      file=sys.stderr)

            if not test:
                rec.write(out_channel, 9)
                out_channel.flush()

    except KeyboardInterrupt:
        pass
    except Exception as e:
        print("Exception: {}".format(str(e)), file=sys.stderr)
        return 1

    return 0
+
+
+#------------------------------------------------------------------------------
if __name__ == "__main__":
    # Run the playback and propagate its exit code to the shell.
    sys.exit(main())
diff --git a/bin/ql2sc b/bin/ql2sc
new file mode 100755
index 0000000..dbab169
Binary files /dev/null and b/bin/ql2sc differ
diff --git a/bin/run_with_lock b/bin/run_with_lock
new file mode 100755
index 0000000..9c0e4b9
Binary files /dev/null and b/bin/run_with_lock differ
diff --git a/bin/sc2pa b/bin/sc2pa
new file mode 100755
index 0000000..87db810
--- /dev/null
+++ b/bin/sc2pa
@@ -0,0 +1,217 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import time
+import sys
+import os
+import time
+import seiscomp.core, seiscomp.client, seiscomp.datamodel, seiscomp.logging
+from seiscomp.scbulletin import Bulletin, stationCount
+
+
class ProcAlert(seiscomp.client.Application):
    """SeisComP client that publishes qualifying events via a ProcAlert script.

    Subscribes to EVENT/LOCATION/MAGNITUDE messages, checks publication
    criteria (pick count, origin age, region/depth, network magnitude) and,
    when they are met, writes a bulletin text to a temporary file and calls
    an external script with that file and the event ID.
    """

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        self.setMessagingEnabled(True)
        self.setDatabaseEnabled(True, True)

        self.setAutoApplyNotifierEnabled(True)
        self.setInterpretNotifierEnabled(True)

        self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)
        self.addMessagingSubscription("EVENT")
        self.addMessagingSubscription("LOCATION")
        self.addMessagingSubscription("MAGNITUDE")

        # Publication thresholds; may be overridden by config or options.
        self.maxAgeDays = 1.
        self.minPickCount = 25

        self.procAlertScript = ""

        # NOTE(review): this object is created and immediately discarded;
        # kept because the constructor may have registration side effects —
        # confirm whether it can be removed.
        ep = seiscomp.datamodel.EventParameters()

    def createCommandLineDescription(self):
        """Register the "Publishing" command-line option group."""
        try:
            self.commandline().addGroup("Publishing")
            self.commandline().addIntOption("Publishing", "min-arr",
                                            "Minimum arrival count of a published origin", self.minPickCount)
            self.commandline().addDoubleOption("Publishing", "max-age",
                                               "Maximum age in days of published origins", self.maxAgeDays)
            self.commandline().addStringOption("Publishing", "procalert-script",
                                               "Specify the script to publish an event. The ProcAlert file and the event id are passed as parameter $1 and $2")
            self.commandline().addOption("Publishing", "test",
                                         "Test mode, no messages are sent")
        except Exception:
            seiscomp.logging.warning(
                "caught unexpected error %s" % sys.exc_info())

    def initConfiguration(self):
        """Read optional module configuration parameters; missing values
        keep their defaults."""
        if not seiscomp.client.Application.initConfiguration(self):
            return False

        try:
            self.procAlertScript = self.configGetString("scripts.procAlert")
        except Exception:
            pass

        try:
            self.minPickCount = self.configGetInt("minArrivals")
        except Exception:
            pass

        try:
            self.maxAgeDays = self.configGetDouble("maxAgeDays")
        except Exception:
            pass

        return True

    def init(self):
        """Apply command-line overrides and set up the bulletin and the
        origin cache."""
        if not seiscomp.client.Application.init(self):
            return False

        # Command-line options override configuration values.
        try:
            self.procAlertScript = self.commandline().optionString("procalert-script")
        except Exception:
            pass

        try:
            self.minPickCount = self.commandline().optionInt("min-arr")
        except Exception:
            pass

        try:
            self.maxAgeDays = self.commandline().optionDouble("max-age")
        except Exception:
            pass

        self.bulletin = Bulletin(self.query(), "autoloc1")
        self.cache = seiscomp.datamodel.PublicObjectRingBuffer(
            self.query(), 100)

        if not self.procAlertScript:
            seiscomp.logging.warning("No procalert script given")
        else:
            seiscomp.logging.info(
                "Using procalert script: %s" % self.procAlertScript)

        return True

    def addObject(self, parentID, obj):
        """Cache new origins; forward everything else to updateObject()."""
        org = seiscomp.datamodel.Origin.Cast(obj)
        if org:
            self.cache.feed(org)
            seiscomp.logging.info("Received origin %s" % org.publicID())
            return

        self.updateObject(parentID, obj)

    def updateObject(self, parentID, obj):
        """Handle event updates: load the preferred origin, check the
        publication criteria and trigger the ProcAlert script."""
        try:
            evt = seiscomp.datamodel.Event.Cast(obj)
            if evt:
                orid = evt.preferredOriginID()

                org = self.cache.get(seiscomp.datamodel.Origin, orid)
                if not org:
                    seiscomp.logging.error("Unable to fetch origin %s" % orid)
                    return

                # Lazily load arrivals and magnitudes from the database.
                if org.arrivalCount() == 0:
                    self.query().loadArrivals(org)
                if org.stationMagnitudeCount() == 0:
                    self.query().loadStationMagnitudes(org)
                if org.magnitudeCount() == 0:
                    self.query().loadMagnitudes(org)

                if not self.originMeetsCriteria(org, evt):
                    seiscomp.logging.warning("Origin %s not published" % orid)
                    return

                txt = self.bulletin.printEvent(evt)

                for line in txt.split("\n"):
                    line = line.rstrip()
                    seiscomp.logging.info(line)
                seiscomp.logging.info("")

                if not self.commandline().hasOption("test"):
                    self.send_procalert(txt, evt.publicID())

                return

        except Exception:
            sys.stderr.write("%s\n" % sys.exc_info())

    def hasValidNetworkMagnitude(self, org, evt):
        """Return True if the origin contains the event's preferred
        network magnitude."""
        nmag = org.magnitudeCount()
        for imag in range(nmag):
            mag = org.magnitude(imag)
            if mag.publicID() == evt.preferredMagnitudeID():
                return True
        return False

    def send_procalert(self, txt, evid):
        """Write the bulletin text to a temp file and invoke the configured
        ProcAlert script with the file name and the event ID."""
        if self.procAlertScript:
            tmp = "/tmp/yyy%s" % evid.replace("/", "_").replace(":", "-")
            # Fix: the Python 2 builtin file() no longer exists on Python 3;
            # use open() in a context manager so the file is always closed.
            with open(tmp, "w") as f:
                f.write("%s" % txt)

            os.system(self.procAlertScript + " " + tmp + " " + evid)

    def coordinates(self, org):
        """Return the (latitude, longitude, depth) values of an origin."""
        return org.latitude().value(), org.longitude().value(), org.depth().value()

    def originMeetsCriteria(self, org, evt):
        """Decide whether an origin qualifies for publication.

        Rejects suspicious region/depth combinations, too few picks, too old
        origins and missing network magnitudes; manual or confirmed origins
        are always published.
        """
        publish = True

        lat, lon, dep = self.coordinates(org)

        # Deep events inside this (roughly European) box are implausible.
        if 43 < lat < 70 and -10 < lon < 60 and dep > 200:
            seiscomp.logging.error("suspicious region/depth - ignored")
            publish = False

        if stationCount(org) < self.minPickCount:
            seiscomp.logging.error("too few picks - ignored")
            publish = False

        now = seiscomp.core.Time.GMT()
        if (now - org.time().value()).seconds()/86400. > self.maxAgeDays:
            seiscomp.logging.error("origin too old - ignored")
            publish = False

        # evaluationMode/Status raise when unset; treat that as "not manual".
        try:
            if org.evaluationMode() == seiscomp.datamodel.MANUAL:
                publish = True
        except Exception:
            pass

        try:
            if org.evaluationStatus() == seiscomp.datamodel.CONFIRMED:
                publish = True
        except Exception:
            pass

        if not self.hasValidNetworkMagnitude(org, evt):
            seiscomp.logging.error("no network magnitude - ignored")
            publish = False

        return publish
+
+
# Instantiate and run the application; exit with its return code.
app = ProcAlert(len(sys.argv), sys.argv)
sys.exit(app())
diff --git a/bin/sc32inv b/bin/sc32inv
new file mode 120000
index 0000000..7d624b2
--- /dev/null
+++ b/bin/sc32inv
@@ -0,0 +1 @@
+scml2inv
\ No newline at end of file
diff --git a/bin/scalert b/bin/scalert
new file mode 100755
index 0000000..46923be
--- /dev/null
+++ b/bin/scalert
@@ -0,0 +1,717 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import os
+import sys
+import re
+import subprocess
+import traceback
+import seiscomp.core, seiscomp.client, seiscomp.datamodel, seiscomp.math
+import seiscomp.logging, seiscomp.seismology, seiscomp.system
+
+
class ObjectAlert(seiscomp.client.Application):
    """SeisComP client that executes custom scripts when picks, amplitudes,
    preliminary origins or events arrive or are updated.

    Scripts are configured via module configuration or command-line options;
    each script kind (pick/amplitude/alert/event) runs at most one child
    process at a time.
    """

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        self.setMessagingEnabled(True)
        self.setDatabaseEnabled(True, True)
        self.setLoadRegionsEnabled(True)
        self.setMessagingUsername("")
        self.setPrimaryMessagingGroup(
            seiscomp.client.Protocol.LISTENER_GROUP)
        self.addMessagingSubscription("EVENT")
        self.addMessagingSubscription("LOCATION")
        self.addMessagingSubscription("MAGNITUDE")

        self.setAutoApplyNotifierEnabled(True)
        self.setInterpretNotifierEnabled(True)

        self.setLoadCitiesEnabled(True)
        self.setLoadRegionsEnabled(True)

        # Defaults; overridden by configuration and command-line options.
        self._ampType = "snr"
        self._citiesMaxDist = 20
        self._citiesMinPopulation = 50000

        self._eventDescriptionPattern = None
        self._pickScript = None
        self._ampScript = None
        self._alertScript = None
        self._eventScript = None

        # Handles of the currently running script subprocesses (one slot
        # per script kind).
        self._pickProc = None
        self._ampProc = None
        self._alertProc = None
        self._eventProc = None

        self._newWhenFirstSeen = False
        self._oldEvents = []
        self._agencyIDs = []
        self._phaseHints = []
        self._phaseStreams = []
        self._phaseNumber = 1
        self._phaseInterval = 1

    def createCommandLineDescription(self):
        """Register the Alert/Cities/Debug command-line option groups."""
        self.commandline().addOption("Generic", "first-new",
                                     "calls an event a new event when it is seen the first time")
        self.commandline().addGroup("Alert")
        self.commandline().addStringOption("Alert", "amp-type",
                                           "amplitude type to listen to", self._ampType)
        self.commandline().addStringOption("Alert", "pick-script",
                                           "script to be called when a pick arrived, network-, station code pick publicID are passed as parameters $1, $2, $3 and $4")
        self.commandline().addStringOption("Alert", "amp-script",
                                           "script to be called when a station amplitude arrived, network-, station code, amplitude and amplitude publicID are passed as parameters $1, $2, $3 and $4")
        self.commandline().addStringOption("Alert", "alert-script",
                                           "script to be called when a preliminary origin arrived, latitude and longitude are passed as parameters $1 and $2")
        self.commandline().addStringOption("Alert", "event-script",
                                           "script to be called when an event has been declared; the message string, a flag (1=new event, 0=update event), the EventID, the arrival count and the magnitude (optional when set) are passed as parameter $1, $2, $3, $4 and $5")
        self.commandline().addGroup("Cities")
        self.commandline().addStringOption("Cities", "max-dist",
                                           "maximum distance for using the distance from a city to the earthquake")
        self.commandline().addStringOption("Cities", "min-population",
                                           "minimum population for a city to become a point of interest")
        self.commandline().addGroup("Debug")
        self.commandline().addStringOption("Debug", "eventid,E", "specify Event ID")
        return True

    def init(self):
        """Read configuration and options, validate the configured scripts
        and subscribe to the required message groups.

        At least one of the pick/amplitude/alert/event scripts must be
        configured, exist on disk and be executable; otherwise init fails.
        """
        if not seiscomp.client.Application.init(self):
            return False

        foundScript = False
        # module configuration paramters
        try:
            self._newWhenFirstSeen = self.configGetBool("firstNew")
        except:
            pass

        try:
            self._agencyIDs = [self.configGetString("agencyID")]
        except:
            pass

        # "agencyIDs" (plural) overrides the single "agencyID" value.
        try:
            agencyIDs = self.configGetStrings("agencyIDs")
            self._agencyIDs = []
            for item in agencyIDs:
                item = item.strip()
                if item not in self._agencyIDs:
                    self._agencyIDs.append(item)
        except:
            pass


        self._phaseHints = ['P','S']
        try:
            phaseHints = self.configGetStrings("constraints.phaseHints")
            self._phaseHints = []
            for item in phaseHints:
                item = item.strip()
                if item not in self._phaseHints:
                    self._phaseHints.append(item)
        except:
            pass

        self._phaseStreams = []
        try:
            phaseStreams = self.configGetStrings("constraints.phaseStreams")
            for item in phaseStreams:
                rule = item.strip()
                # rule is NET.STA.LOC.CHA and the special charactes ? * | ( ) are allowed
                if not re.fullmatch(r'[A-Z|a-z|0-9|\?|\*|\||\(|\)|\.]+', rule):
                    seiscomp.logging.error("Wrong stream ID format in `constraints.phaseStreams`: %s" % item)
                    return False
                # convert rule to a valid regular expression
                rule = re.sub(r'\.', r'\.', rule)
                rule = re.sub(r'\?', '.' , rule)
                rule = re.sub(r'\*' , '.*' , rule)
                if rule not in self._phaseStreams:
                    self._phaseStreams.append(rule)
        except:
            pass

        try:
            self._phaseNumber = self.configGetInt("constraints.phaseNumber")
        except:
            pass

        try:
            self._phaseInterval = self.configGetInt("constraints.phaseInterval")
        except:
            pass

        # With phaseNumber > 1 picks are collected in a time-span buffer and
        # the pick script is only run once enough picks accumulated.
        if self._phaseNumber > 1:
            self._pickCache = seiscomp.datamodel.PublicObjectTimeSpanBuffer()
            self._pickCache.setTimeSpan(seiscomp.core.TimeSpan(self._phaseInterval))
            self.enableTimer(1)

        try:
            self._eventDescriptionPattern = self.configGetString("poi.message")
        except:
            pass

        try:
            self._citiesMaxDist = self.configGetDouble("poi.maxDist")
        except:
            pass

        try:
            self._citiesMinPopulation = self.configGetInt("poi.minPopulation")
        except:
            pass

        # mostly command-line options
        try:
            self._citiesMaxDist = self.commandline().optionDouble("max-dist")
        except:
            pass

        try:
            if self.commandline().hasOption("first-new"):
                self._newWhenFirstSeen = True
        except:
            pass

        try:
            self._citiesMinPopulation = self.commandline().optionInt("min-population")
        except:
            pass

        try:
            self._ampType = self.commandline().optionString("amp-type")
        except:
            pass

        # For each script: command line wins over configuration; a configured
        # script must exist and be executable.
        # NOTE(review): "exising" typo in the log messages below is runtime
        # output and left unchanged here.
        try:
            self._pickScript = self.commandline().optionString("pick-script")
        except:
            try:
                self._pickScript = self.configGetString("scripts.pick")
            except:
                seiscomp.logging.warning("No pick script defined")

        if self._pickScript:
            self._pickScript = seiscomp.system.Environment.Instance().absolutePath(self._pickScript)
            seiscomp.logging.info("Using pick script %s" % self._pickScript)

            if not os.path.isfile(self._pickScript):
                seiscomp.logging.error(" + not exising")
                return False

            if not os.access(self._pickScript, os.X_OK):
                seiscomp.logging.error(" + not executable")
                return False

            foundScript = True

        try:
            self._ampScript = self.commandline().optionString("amp-script")
        except:
            try:
                self._ampScript = self.configGetString("scripts.amplitude")
            except:
                seiscomp.logging.warning("No amplitude script defined")

        if self._ampScript:
            self._ampScript = seiscomp.system.Environment.Instance().absolutePath(self._ampScript)
            seiscomp.logging.info("Using amplitude script %s" % self._ampScript)

            if not os.path.isfile(self._ampScript):
                seiscomp.logging.error(" + not exising")
                return False

            if not os.access(self._ampScript, os.X_OK):
                seiscomp.logging.error(" + not executable")
                return False

            foundScript = True

        try:
            self._alertScript = self.commandline().optionString("alert-script")
        except:
            try:
                self._alertScript = self.configGetString("scripts.alert")
            except:
                seiscomp.logging.warning("No alert script defined")

        if self._alertScript:
            self._alertScript = seiscomp.system.Environment.Instance(
            ).absolutePath(self._alertScript)
            seiscomp.logging.info("Using alert script %s" % self._alertScript)

            if not os.path.isfile(self._alertScript):
                seiscomp.logging.error(" + not exising")
                return False

            if not os.access(self._alertScript, os.X_OK):
                seiscomp.logging.error(" + not executable")
                return False

            foundScript = True

        try:
            self._eventScript = self.commandline().optionString("event-script")
        except:
            try:
                self._eventScript = self.configGetString("scripts.event")
            except:
                seiscomp.logging.warning("No event script defined")

        if self._eventScript:
            self._eventScript = seiscomp.system.Environment.Instance(
            ).absolutePath(self._eventScript)
            seiscomp.logging.info("Using event script %s" % self._eventScript)

            if not os.path.isfile(self._eventScript):
                seiscomp.logging.error(" + not exising")
                return False

            if not os.access(self._eventScript, os.X_OK):
                seiscomp.logging.error(" + not executable")
                return False

            foundScript = True

        if not foundScript:
            seiscomp.logging.error("Found no valid script in configuration")
            return False

        seiscomp.logging.info("Creating ringbuffer for 100 objects")
        if not self.query():
            seiscomp.logging.warning(
                "No valid database interface to read from")
        self._cache = seiscomp.datamodel.PublicObjectRingBuffer(
            self.query(), 100)

        # Only subscribe to high-volume groups when a script needs them.
        if self._ampScript and self.connection():
            seiscomp.logging.info(
                "Amplitude script defined: subscribing to AMPLITUDE message group")
            self.connection().subscribe("AMPLITUDE")

        if self._pickScript and self.connection():
            seiscomp.logging.info(
                "Pick script defined: subscribing to PICK message group")
            self.connection().subscribe("PICK")

        if self._newWhenFirstSeen:
            seiscomp.logging.info(
                "A new event is declared when I see it the first time")

        seiscomp.logging.info("Filtering:")
        if " ".join(self._agencyIDs):
            seiscomp.logging.info(" + agencyIDs filter for events and picks: %s" % (" ".join(self._agencyIDs)))
        else:
            seiscomp.logging.info(" + agencyIDs: no filter is applied")

        if " ".join(self._phaseHints):
            seiscomp.logging.info(" + phase hint filter for picks: '%s'" % (" ".join(self._phaseHints)))
        else:
            seiscomp.logging.info(" + phase hints: no filter is applied")

        if " ".join(self._phaseStreams):
            seiscomp.logging.info(" + phase stream ID filter for picks: '%s'" % (" ".join(self._phaseStreams)))
        else:
            seiscomp.logging.info(" + phase stream ID: no filter is applied")

        return True

    def run(self):
        """Optionally process a single event given via --eventid, then enter
        the normal application loop."""
        try:
            try:
                eventID = self.commandline().optionString("eventid")
                event = self._cache.get(seiscomp.datamodel.Event, eventID)
                if event:
                    self.notifyEvent(event)
            except:
                pass

            return seiscomp.client.Application.run(self)
        except:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            return False


    def runPickScript(self, pickObjectList):
        """Run the pick script once per pick, passing network, station,
        pick publicID and phase hint as arguments.

        Waits up to one second for a still-running instance; skips the
        message if it has not finished by then.
        """
        if not self._pickScript:
            return

        for pickObject in pickObjectList:
            # parse values
            try:
                net = pickObject.waveformID().networkCode()
            except:
                net = "unknown"
            try:
                sta = pickObject.waveformID().stationCode()
            except:
                sta = "unknown"
            pickID = pickObject.publicID()
            try:
                phaseHint = pickObject.phaseHint().code()
            except:
                phaseHint = "unknown"

            print(net, sta, pickID, phaseHint)

            if self._pickProc is not None:
                if self._pickProc.poll() is None:
                    seiscomp.logging.info(
                        "Pick script still in progress -> wait one second")
                    self._pickProc.wait(1)
                if self._pickProc.poll() is None:
                    seiscomp.logging.warning(
                        "Pick script still in progress -> skipping message")
                    return
            try:
                self._pickProc = subprocess.Popen(
                    [self._pickScript, net, sta, pickID, phaseHint])
                seiscomp.logging.info(
                    "Started pick script with pid %d" % self._pickProc.pid)
            except:
                seiscomp.logging.error(
                    "Failed to start pick script '%s'" % self._pickScript)

    def runAmpScript(self, ampObject):
        """Run the amplitude script with network, station, amplitude value
        and amplitude publicID; skips if the previous run is still active."""
        if not self._ampScript:
            return

        # parse values
        net = ampObject.waveformID().networkCode()
        sta = ampObject.waveformID().stationCode()
        amp = ampObject.amplitude().value()
        ampID = ampObject.publicID()

        if self._ampProc is not None:
            if self._ampProc.poll() is None:
                seiscomp.logging.warning(
                    "Amplitude script still in progress -> skipping message")
                return
        try:
            self._ampProc = subprocess.Popen(
                [self._ampScript, net, sta, "%.2f" % amp, ampID])
            seiscomp.logging.info(
                "Started amplitude script with pid %d" % self._ampProc.pid)
        except:
            seiscomp.logging.error(
                "Failed to start amplitude script '%s'" % self._ampScript)

    def runAlert(self, lat, lon):
        """Run the alert script with latitude and longitude; skips if the
        previous run is still active."""
        if not self._alertScript:
            return

        if self._alertProc is not None:
            if self._alertProc.poll() is None:
                seiscomp.logging.warning(
                    "AlertScript still in progress -> skipping message")
                return
        try:
            self._alertProc = subprocess.Popen(
                [self._alertScript, "%.1f" % lat, "%.1f" % lon])
            seiscomp.logging.info(
                "Started alert script with pid %d" % self._alertProc.pid)
        except:
            seiscomp.logging.error(
                "Failed to start alert script '%s'" % self._alertScript)

    def handleMessage(self, msg):
        """Intercept data messages carrying preliminary origins to trigger
        the alert script, then delegate to the base implementation."""
        try:
            dm = seiscomp.core.DataMessage.Cast(msg)
            if dm:
                for att in dm:
                    org = seiscomp.datamodel.Origin.Cast(att)
                    if org:
                        try:
                            if org.evaluationStatus() == seiscomp.datamodel.PRELIMINARY:
                                self.runAlert(org.latitude().value(),
                                              org.longitude().value())
                        except:
                            pass

            #ao = seiscomp.datamodel.ArtificialOriginMessage.Cast(msg)
            # if ao:
            #    org = ao.origin()
            #    if org:
            #        self.runAlert(org.latitude().value(), org.longitude().value())
            #        return

            seiscomp.client.Application.handleMessage(self, msg)
        except:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)

    def addObject(self, parentID, object):
        """Dispatch newly received objects (pick, amplitude, origin,
        magnitude, event) to the matching notification handler."""
        try:
            # pick
            obj = seiscomp.datamodel.Pick.Cast(object)
            if obj:
                self._cache.feed(obj)
                seiscomp.logging.debug("got new pick '%s'" % obj.publicID())
                agencyID = obj.creationInfo().agencyID()
                phaseHint = obj.phaseHint().code()
                # Apply the stream-ID, agency and phase-hint filters in turn.
                if self._phaseStreams:
                    waveformID = "%s.%s.%s.%s" % (
                        obj.waveformID().networkCode(), obj.waveformID().stationCode(),
                        obj.waveformID().locationCode(), obj.waveformID().channelCode())
                    matched = False
                    for rule in self._phaseStreams:
                        if re.fullmatch(rule, waveformID):
                            matched = True
                            break
                    if not matched:
                        seiscomp.logging.debug(
                            " + stream ID %s does not match constraints.phaseStreams rules"
                            % (waveformID))
                        return

                if not self._agencyIDs or agencyID in self._agencyIDs:
                    if not self._phaseHints or phaseHint in self._phaseHints:
                        self.notifyPick(obj)
                    else:
                        seiscomp.logging.debug(" + phase hint %s does not match '%s'"
                                               % (phaseHint, self._phaseHints))
                else:
                    seiscomp.logging.debug(" + agencyID %s does not match '%s'"
                                           % (agencyID, self._agencyIDs))
                return

            # amplitude
            obj = seiscomp.datamodel.Amplitude.Cast(object)
            if obj:
                if obj.type() == self._ampType:
                    seiscomp.logging.debug("got new %s amplitude '%s'" % (
                        self._ampType, obj.publicID()))
                    self.notifyAmplitude(obj)
                return

            # origin
            obj = seiscomp.datamodel.Origin.Cast(object)
            if obj:
                self._cache.feed(obj)
                seiscomp.logging.debug("got new origin '%s'" % obj.publicID())

                try:
                    if obj.evaluationStatus() == seiscomp.datamodel.PRELIMINARY:
                        self.runAlert(obj.latitude().value(),
                                      obj.longitude().value())
                except:
                    pass

                return

            # magnitude
            obj = seiscomp.datamodel.Magnitude.Cast(object)
            if obj:
                self._cache.feed(obj)
                seiscomp.logging.debug(
                    "got new magnitude '%s'" % obj.publicID())
                return

            # event
            obj = seiscomp.datamodel.Event.Cast(object)
            if obj:
                org = self._cache.get(
                    seiscomp.datamodel.Origin, obj.preferredOriginID())
                agencyID = org.creationInfo().agencyID()
                seiscomp.logging.debug("got new event '%s'" % obj.publicID())
                if not self._agencyIDs or agencyID in self._agencyIDs:
                    self.notifyEvent(obj, True)
                return
        except:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)

    def updateObject(self, parentID, object):
        """Handle event updates, applying the agency filter before
        notification."""
        try:
            obj = seiscomp.datamodel.Event.Cast(object)
            if obj:
                org = self._cache.get(
                    seiscomp.datamodel.Origin, obj.preferredOriginID())
                agencyID = org.creationInfo().agencyID()
                seiscomp.logging.debug("update event '%s'" % obj.publicID())
                if not self._agencyIDs or agencyID in self._agencyIDs:
                    self.notifyEvent(obj, False)
        except:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)

    def handleTimeout(self):
        """Timer callback: re-check whether enough picks accumulated."""
        self.checkEnoughPicks()

    def checkEnoughPicks(self):
        """Run the pick script once at least _phaseNumber picks arrived and
        _phaseInterval seconds elapsed since the oldest one."""
        if self._pickCache.size() >= self._phaseNumber:
            # wait until self._phaseInterval has elapsed before calling the
            # script (more picks might come)
            timeWindowLength = (seiscomp.core.Time.GMT() - self._pickCache.oldest()).length()
            if timeWindowLength >= self._phaseInterval:
                picks = [seiscomp.datamodel.Pick.Cast(o) for o in self._pickCache]
                self.runPickScript(picks)
                self._pickCache.clear()

    def notifyPick(self, pick):
        """Either run the pick script immediately (_phaseNumber <= 1) or
        buffer the pick for batched processing."""
        if self._phaseNumber <= 1:
            self.runPickScript([pick])
        else:
            self.checkEnoughPicks()
            self._pickCache.feed(pick)

    def notifyAmplitude(self, amp):
        """Forward a matching amplitude to the amplitude script."""
        self.runAmpScript(amp)

    def notifyEvent(self, evt, newEvent=True, dtmax=3600):
        """Build a human-readable event message and run the event script.

        Requires the preferred origin (and, for non-preliminary origins, the
        preferred magnitude) to be available; otherwise the message is
        ignored.  `dtmax` is currently unused (the age check is commented
        out below).
        """
        try:
            org = self._cache.get(
                seiscomp.datamodel.Origin, evt.preferredOriginID())
            if not org:
                seiscomp.logging.warning(
                    "unable to get origin %s, ignoring event message" % evt.preferredOriginID())
                return

            preliminary = False
            try:
                if org.evaluationStatus() == seiscomp.datamodel.PRELIMINARY:
                    preliminary = True
            except:
                pass

            if preliminary == False:
                nmag = self._cache.get(
                    seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID())
                if nmag:
                    mag = nmag.magnitude().value()
                    mag = "magnitude %.1f" % mag
                else:
                    if len(evt.preferredMagnitudeID()) > 0:
                        seiscomp.logging.warning(
                            "unable to get magnitude %s, ignoring event message" % evt.preferredMagnitudeID())
                    else:
                        seiscomp.logging.warning(
                            "no preferred magnitude yet, ignoring event message")
                    return

            # keep track of old events
            if self._newWhenFirstSeen:
                if evt.publicID() in self._oldEvents:
                    newEvent = False
                else:
                    newEvent = True
                self._oldEvents.append(evt.publicID())

            dsc = seiscomp.seismology.Regions.getRegionName(
                org.latitude().value(), org.longitude().value())

            # Optionally replace the region name with the configured
            # point-of-interest message pattern.
            if self._eventDescriptionPattern:
                try:
                    city, dist, azi = self.nearestCity(org.latitude().value(), org.longitude(
                    ).value(), self._citiesMaxDist, self._citiesMinPopulation)
                    if city:
                        dsc = self._eventDescriptionPattern
                        region = seiscomp.seismology.Regions.getRegionName(
                            org.latitude().value(), org.longitude().value())
                        distStr = str(int(seiscomp.math.deg2km(dist)))
                        dsc = dsc.replace("@region@", region).replace(
                            "@dist@", distStr).replace("@poi@", city.name())
                except:
                    pass

            seiscomp.logging.debug("desc: %s" % dsc)

            dep = org.depth().value()
            now = seiscomp.core.Time.GMT()
            otm = org.time().value()

            dt = (now - otm).seconds()

            # if dt > dtmax:
            #    return

            # Render the event age as a human-readable string.
            if dt > 3600:
                dt = "%d hours %d minutes ago" % (dt/3600, (dt % 3600)/60)
            elif dt > 120:
                dt = "%d minutes ago" % (dt/60)
            else:
                dt = "%d seconds ago" % dt

            if preliminary:
                message = "earthquake, XXL, preliminary, %s, %s" % (dt, dsc)
            else:
                message = "earthquake, %s, %s, %s, depth %d kilometers" % (
                    dt, dsc, mag, int(dep+0.5))
            seiscomp.logging.info(message)

            if not self._eventScript:
                return

            if self._eventProc is not None:
                if self._eventProc.poll() is None:
                    seiscomp.logging.warning(
                        "EventScript still in progress -> skipping message")
                    return

            # Script arguments: message, new-event flag, event ID,
            # associated phase count, preferred magnitude (may be empty).
            try:
                param2 = 0
                param3 = 0
                param4 = ""
                if newEvent:
                    param2 = 1

                org = self._cache.get(
                    seiscomp.datamodel.Origin, evt.preferredOriginID())
                if org:
                    try:
                        param3 = org.quality().associatedPhaseCount()
                    except:
                        pass

                nmag = self._cache.get(
                    seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID())
                if nmag:
                    param4 = "%.1f" % nmag.magnitude().value()

                self._eventProc = subprocess.Popen(
                    [self._eventScript, message, "%d" % param2, evt.publicID(), "%d" % param3, param4])
                seiscomp.logging.info(
                    "Started event script with pid %d" % self._eventProc.pid)
            except:
                seiscomp.logging.error("Failed to start event script '%s %s %d %d %s'" % (
                    self._eventScript, message, param2, param3, param4))
        except:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)

    def printUsage(self):
        """Print usage information including the base application options."""
        print('''Usage:
  scalert [options]

Execute custom scripts upon arrival of objects or updates''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Execute scalert on command line with debug output
  scalert --debug
''')
+
# Instantiate and run the application; exit with its return code.
app = ObjectAlert(len(sys.argv), sys.argv)
sys.exit(app())
diff --git a/bin/scamp b/bin/scamp
new file mode 100755
index 0000000..811e759
Binary files /dev/null and b/bin/scamp differ
diff --git a/bin/scardac b/bin/scardac
new file mode 100755
index 0000000..be14b6a
Binary files /dev/null and b/bin/scardac differ
diff --git a/bin/scart b/bin/scart
new file mode 100755
index 0000000..da672f2
--- /dev/null
+++ b/bin/scart
@@ -0,0 +1,1363 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+from __future__ import print_function
+
+from getopt import gnu_getopt, GetoptError
+
+import bisect
+import glob
+import re
+import time
+import sys
+import os
+
+import seiscomp.core
+import seiscomp.client
+import seiscomp.config
+import seiscomp.io
+import seiscomp.system
+
+
class Archive:
    """Read access to a miniSEED SDS archive.

    The archive layout is YEAR/NET/STA/CHA.D/NET.STA.LOC.CHA.D.YEAR.DOY.
    `iterators()` expands wildcards into one StreamIterator per concrete
    stream; `readRecord()`/`findIndex()` give random access into single
    day files, with a bounded pool of open file handles.
    """

    def __init__(self, archiveDirectory):
        # Root of the SDS tree; callers ensure it ends with "/".
        self.archiveDirectory = archiveDirectory
        # Open FileRecordStream handles keyed by archive-relative path.
        self.filePool = dict()
        # Upper bound on cached handles (see readRecord).
        self.filePoolSize = 100

    def iterators(self, begin, end, net, sta, loc, cha):
        """Return a list of StreamIterator objects for every concrete
        stream matching the possibly wildcarded net/sta/loc/cha codes
        within [begin, end).

        Wildcards are resolved recursively by listing the corresponding
        directory level; missing directories are reported to stderr and
        skipped.
        """
        t = time.gmtime(begin.seconds())
        t_end = time.gmtime(end.seconds())

        start_year = t[0]

        for year in range(start_year, t_end[0] + 1):
            if year > start_year:
                # Subsequent years start at day 1.
                begin = seiscomp.core.Time.FromYearDay(year, 1)
                t = time.gmtime(begin.seconds())

            if net == "*":
                netdir = self.archiveDirectory + str(year) + "/"
                try:
                    files = os.listdir(netdir)
                except BaseException:
                    sys.stderr.write(
                        "info: skipping year %i - not found in archive %s\n"
                        % (year, netdir)
                    )
                    continue

                its = []
                for file in files:
                    if not os.path.isdir(netdir + file):
                        continue
                    # Recurse with the concrete network code.
                    tmp_its = self.iterators(begin, end, file, sta, loc, cha)
                    for it in tmp_its:
                        its.append(it)

                return its

            if sta == "*":
                stadir = self.archiveDirectory + str(year) + "/" + net + "/"

                try:
                    files = os.listdir(stadir)
                except BaseException:
                    sys.stderr.write(
                        "info: skipping network '%s' - not found in archive %s\n"
                        % (net, stadir)
                    )
                    continue

                its = []
                for file in files:
                    if not os.path.isdir(stadir + file):
                        continue
                    # Recurse with the concrete station code.
                    tmp_its = self.iterators(begin, end, net, file, loc, cha)
                    for it in tmp_its:
                        its.append(it)

                return its

            # Check if cha contains a regular expression or not
            mr = re.match("[A-Z|a-z|0-9]*", cha)
            if (mr and mr.group() != cha) or cha == "*":
                cha = cha.replace("?", ".")
                stadir = self.archiveDirectory + str(year) + "/" + net + "/" + sta + "/"
                try:
                    files = os.listdir(stadir)
                except BaseException:
                    sys.stderr.write(
                        "info: skipping station %s - no data files "
                        "found in archive %s\n" % (sta, stadir)
                    )
                    return []

                its = []
                for file in files:
                    if not os.path.isdir(stadir + file):
                        sys.stderr.write(
                            "info: skipping data file '%s' - not found in archive %s\n"
                            % (file, stadir)
                        )
                        continue

                    # Directory names are like "BHZ.D"; match the channel part.
                    part = file[:3]
                    if cha != "*":
                        mr = re.match(cha, part)
                        if not mr or mr.group() != part:
                            continue

                    tmp_its = self.iterators(begin, end, net, sta, loc, part)
                    for it in tmp_its:
                        its.append(it)

                return its

            if loc == "*":
                dir = (
                    self.archiveDirectory
                    + str(year)
                    + "/"
                    + net
                    + "/"
                    + sta
                    + "/"
                    + cha
                    + ".D/"
                )
                its = []

                start_day = t[7]
                if t_end[0] > year:
                    # Window continues into the next year: scan to year end.
                    end_day = 366
                else:
                    end_day = t_end[7]

                files = glob.glob(dir + "*.%03d" % start_day)

                # Find first day with data
                while not files and start_day <= end_day:
                    start_day += 1
                    begin = seiscomp.core.Time.FromYearDay(year, start_day)
                    files = glob.glob(dir + "*.%03d" % start_day)

                if not files:
                    t = time.gmtime(begin.seconds() - 86400)
                    sys.stderr.write(
                        "info: skipping streams '%s.%s.*.%s on %s '"
                        "- no data found for this day in archive %s\n"
                        % (net, sta, cha, time.strftime("%Y-%m-%d", t), dir)
                    )

                for file in files:
                    file = file.split("/")[-1]
                    if not os.path.isfile(dir + file):
                        sys.stderr.write(
                            "info: skipping data file '%s' - not found in archive %s\n"
                            % (file, dir)
                        )
                        continue

                    # The location code is the third token of the file name.
                    tmp_its = self.iterators(
                        begin, end, net, sta, file.split(".")[2], cha
                    )
                    for it in tmp_its:
                        its.append(it)

                return its

            # All codes are concrete: create the iterator and keep it only
            # if it found at least one record in the window.
            it = StreamIterator(self, begin, end, net, sta, loc, cha)
            if it.record is not None:
                return [it]

        return []

    def location(self, rt, net, sta, loc, cha):
        """Return (directory, filename) of the SDS day file containing
        time *rt* for the given stream codes."""
        t = time.gmtime(rt.seconds())
        dir = str(t[0]) + "/" + net + "/" + sta + "/" + cha + ".D/"
        file = (
            net + "." + sta + "." + loc + "." + cha + ".D." + str(t[0]) + ".%03d" % t[7]
        )
        return dir, file

    def findIndex(self, begin, end, file):
        """Scan *file* for the first valid record overlapping [begin, end).

        Returns (record, index) where *index* is the stream position after
        that record, or (None, None) if the file cannot be opened or holds
        no matching record.
        """
        rs = seiscomp.io.FileRecordStream()
        rs.setRecordType("mseed")
        if not rs.setSource(self.archiveDirectory + file):
            return None, None

        ri = seiscomp.io.RecordInput(rs)

        index = None
        retRec = None

        for rec in ri:
            if rec is None:
                break

            # Records without a positive sampling rate carry no usable timing.
            if rec.samplingFrequency() <= 0:
                continue

            if rec.startTime() >= end:
                break
            if rec.endTime() < begin:
                continue

            index = rs.tell()
            retRec = rec
            break

        rs.close()

        return retRec, index

    def readRecord(self, file, index):
        """Read the next valid record of *file* starting at position *index*.

        Returns (record, newIndex); record is None at end of file. File
        handles are cached in self.filePool while the pool is below
        filePoolSize.
        """
        try:
            rs = self.filePool[file]
        except BaseException:
            rs = seiscomp.io.FileRecordStream()
            rs.setRecordType("mseed")
            if not rs.setSource(self.archiveDirectory + file):
                return (None, None)

            rs.seek(index)

            # Cache the handle while the pool has room. NOTE(review): the
            # eviction of old handles hinted at below was never implemented,
            # so a full pool simply stops caching new handles.
            if len(self.filePool) < self.filePoolSize:
                # self.filePool.pop(self.fileList[-1])
                # print "Remove %s from filepool" % self.fileList[-1]
                # del self.fileList[-1]
                self.filePool[file] = rs

        ri = seiscomp.io.RecordInput(
            rs, seiscomp.core.Array.INT, seiscomp.core.Record.SAVE_RAW
        )
        # Read only valid records
        while True:
            rec = next(ri)
            if rec is None:
                break
            if rec.samplingFrequency() <= 0:
                continue
            break

        index = rs.tell()

        if rec is None:
            # Remove file handle from pool
            rs.close()
            try:
                self.filePool.pop(file)
            except BaseException:
                pass

        return rec, index

    def stepTime(self, rt):
        """Return midnight (00:00:00) of the day following *rt*."""
        rt = rt + seiscomp.core.TimeSpan(86400)
        t = rt.get()
        rt.set(t[1], t[2], t[3], 0, 0, 0, 0)
        return rt
+
+
class StreamIterator:
    """Iterates the records of one concrete NET.STA.LOC.CHA stream,
    transparently stepping from one SDS day file to the next.

    After construction, `record` holds the first record in the window
    (or None); each `next()` advances to the following record. Instances
    order themselves by `current` (record start time) or, when
    `compareEndTime` is set, by `currentEnd` — this is what
    ArchiveIterator's sorted insertion relies on.
    """

    def __init__(self, ar, begin, end, net, sta, loc, cha):
        self.archive = ar

        self.begin = begin
        self.end = end

        self.net = net
        self.sta = sta
        self.loc = loc
        self.cha = cha

        # When True, comparisons use record end times instead of start times.
        self.compareEndTime = False

        workdir, file = ar.location(begin, net, sta, loc, cha)
        self.file = workdir + file
        # print "Starting at file %s" % self.file

        self.record, self.index = ar.findIndex(begin, end, self.file)
        if self.record:
            self.current = self.record.startTime()
            self.currentEnd = self.record.endTime()

    def next(self):
        # needed for Python 2 only
        return self.__next__()

    def __next__(self):
        """Advance to the next record; returns it or None when the window
        is exhausted."""
        while True:
            self.record, self.index = self.archive.readRecord(self.file, self.index)
            if self.record:
                self.current = self.record.startTime()
                self.currentEnd = self.record.endTime()
                if self.current >= self.end:
                    # Past the requested window: signal exhaustion.
                    self.record = None
                return self.record
            else:
                # Skip the current day file
                self.current = self.archive.stepTime(self.current)
                # Are we out of scope?
                if self.current >= self.end:
                    self.record = None
                    return self.record

                # Use the new file and start from the beginning
                workdir, file = self.archive.location(
                    self.current, self.net, self.sta, self.loc, self.cha
                )
                self.file = workdir + file
                self.index = 0

    def __cmp__(self, other):
        # Three-way comparison on the configured time attribute
        # (Python 2 legacy; __lt__ below delegates here).
        if self.compareEndTime:
            if self.currentEnd > other.currentEnd:
                return 1
            elif self.currentEnd < other.currentEnd:
                return -1
            return 0
        else:
            if self.current > other.current:
                return 1
            elif self.current < other.current:
                return -1
            return 0

    def __lt__(self, other):
        # Required by bisect.insort in ArchiveIterator (Python 3).
        if self.__cmp__(other) < 0:
            return True
        return False
+
+
class ArchiveIterator:
    """Maintains a time-sorted collection of StreamIterators and hands out
    the globally next record via nextSort().

    Ordering is by record start time, or by record end time when
    *sortByEndTime* is true.
    """

    def __init__(self, ar, sortByEndTime):
        self.archive = ar
        self.streams = []
        self.sortByEndTime = sortByEndTime

    def append(self, beginTime, endTime, net, sta, loc, cha):
        """Expand the (possibly wildcarded) stream codes and insert the
        resulting iterators at their sorted positions."""
        for stream in self.archive.iterators(beginTime, endTime, net, sta, loc, cha):
            stream.compareEndTime = self.sortByEndTime
            bisect.insort(self.streams, stream)

    def appendStation(self, beginTime, endTime, net, sta):
        """Convenience wrapper: all locations and channels of one station."""
        self.append(beginTime, endTime, net, sta, "*", "*")

    def nextSort(self):
        """Pop and return the earliest pending record, or None when all
        streams are exhausted."""
        if not self.streams:
            return None

        head = self.streams.pop(0)
        rec = head.record

        # Advance the drained stream and, if it still has data, re-insert
        # it at its new sorted position.
        next(head)
        if head.record is not None:
            bisect.insort(self.streams, head)

        return rec
+
+
class Copy:
    """Yields every record of every stream in archive order, one stream
    after the other (no global time sorting)."""

    def __init__(self, archiveIterator):
        self.archiveIterator = archiveIterator

    def __iter__(self):
        for stream in self.archiveIterator.streams:
            current = stream.record
            while current:
                yield current
                # next(stream) returns the following record or None.
                current = next(stream)
+
+
class Sorter:
    """Yields records in global time order by repeatedly asking the
    ArchiveIterator for the next sorted record."""

    def __init__(self, archiveIterator):
        self.archiveIterator = archiveIterator

    def __iter__(self):
        rec = self.archiveIterator.nextSort()
        while rec:
            yield rec
            rec = self.archiveIterator.nextSort()
+
+
+####################################################################
+##
+# Application block
+##
+####################################################################
+
+
def checkFile(fileName):
    """
    Check the miniSEED records in a file, report unsorted records.

    Parameters
    ----------
    fileName : str
        Name of the miniSEED waveform file to check.

    Returns
    -------
    False
        If no error is found in file
    error string
        If file or records are corrupted

    """
    rs = seiscomp.io.FileRecordStream()
    rs.setRecordType("mseed")

    if not rs.setSource(fileName):
        return "cannot read file"

    ri = seiscomp.io.RecordInput(rs)
    lastEnd = None
    for rec in ri:
        if rec is None:
            continue

        sF = rec.samplingFrequency()
        # Records without a positive sampling rate carry no usable timing.
        if sF <= 0:
            continue

        if lastEnd and rec.endTime() <= lastEnd:
            overlap = float(lastEnd - rec.endTime())

            # Tolerate overlaps smaller than one sample interval.
            if overlap >= 1 / sF:
                # Bug fix: report the end time that was actually compared
                # (the message previously printed rec.startTime()).
                errorMsg = (
                    "new record ends at or before end of last record: %s < %s"
                    % (rec.endTime(), lastEnd)
                )
                return errorMsg

        lastEnd = rec.endTime()

    return False
+
+
def checkFilePrint(fileName, streamDict):
    """
    Check the miniSEED records in a file and collect per-stream statistics.

    Parameters
    ----------
    fileName : str
        Name of the miniSEED waveform file to check.
    streamDict : dict
        Mapping streamID -> (startISO, endISO, recordCount, sampleCount,
        samplingRate), updated in place.

    Returns
    -------
    True
        If the file could be read.
    error string
        If the file cannot be read.

    """
    rs = seiscomp.io.FileRecordStream()
    rs.setRecordType("mseed")

    if not rs.setSource(fileName):
        return "cannot read file"

    ri = seiscomp.io.RecordInput(rs)
    for rec in ri:
        if rec is None:
            continue

        stream = f"{rec.networkCode()}.{rec.stationCode()}.{rec.locationCode()}.{rec.channelCode()}"
        recStart = rec.startTime()
        recEnd = rec.endTime()

        if stream in streamDict:
            streamStart, streamEnd, streamNRec, streamNSamp = streamDict[stream][:4]
            if recStart.valid() and recStart.iso() < streamStart:
                streamStart = recStart.iso()
            if recEnd.valid() and recEnd.iso() > streamEnd:
                streamEnd = recEnd.iso()
            # Bug fix: apply both boundary updates and the counters in one
            # assignment. Previously a record extending the window on both
            # sides lost the start update (the second dict update used the
            # stale start), and records lying fully inside the window were
            # never counted at all.
            streamDict[stream] = (
                streamStart,
                streamEnd,
                streamNRec + 1,
                streamNSamp + rec.data().size(),
                rec.samplingFrequency(),
            )
        else:
            # add stream for the first time
            streamDict[stream] = (
                recStart.iso(),
                recEnd.iso(),
                1,
                rec.data().size(),
                rec.samplingFrequency(),
            )

    return True
+
+
def str2time(timestring):
    """
    Liberally accept many time string formats and convert them to a
    seiscomp.core.Time
    """

    normalized = timestring.strip()
    # Normalize all common date/time separators to blanks.
    for separator in ["-", "/", ":", "T", "Z"]:
        normalized = normalized.replace(separator, " ")
    fields = normalized.split()
    if not 3 <= len(fields) <= 6:
        print(
            "error: Provide a valid time format, e.g.: 'YYYY-MM-DD hh:mm:ss'",
            file=sys.stderr,
        )
        sys.exit(1)

    # Pad missing hour/minute/second fields with zeros.
    fields.extend(["0"] * (6 - len(fields)))
    normalized = " ".join(fields)
    timeFormat = "%Y %m %d %H %M %S"
    if "." in normalized:
        timeFormat += ".%f"

    result = seiscomp.core.Time()
    result.fromString(normalized, timeFormat)
    return result
+
+
def time2str(time):
    """Format a seiscomp.core.Time as 'YYYY-MM-DD hh:mm:ss.ff'."""
    pattern = "%Y-%m-%d %H:%M:%S.%2f"
    return time.toString(pattern)
+
+
def create_dir(dir):
    """Ensure *dir* exists as a writable directory.

    Returns True when the directory is already writable or could be
    created (including parents), False otherwise.
    """
    if os.access(dir, os.W_OK):
        return True

    try:
        os.makedirs(dir)
        return True
    except OSError:
        # Bug fix: catch only OSError instead of BaseException so that
        # KeyboardInterrupt/SystemExit are not silently swallowed.
        return False
+
+
def isFile(url):
    """Return True when *url* denotes a local file source: either it has
    no scheme at all or uses the explicit 'file' scheme."""
    parts = url.split("://")
    if len(parts) < 2:
        return True
    return parts[0] == "file"
+
+
def readStreamList(listFile):
    """
    Read list of streams from file

    Parameters
    ----------
    listFile : str
        Path of the input list file ("-" reads from stdin), one line per
        stream, format: NET.STA.LOC.CHA

    Returns
    -------
    list
        (net, sta, loc, cha) tuples; empty on any error.

    """
    streams = []

    try:
        if listFile == "-":
            f = sys.stdin
            listFile = "stdin"
        else:
            f = open(listFile, "r")
    except Exception:
        print("error: unable to open '{}'".format(listFile), file=sys.stderr)
        return []

    try:
        # Bug fix: enumerate from 1 so error messages report human-readable
        # line numbers (previously 0-based, off by one).
        for lineNumber, line in enumerate(f, start=1):
            line = line.strip()
            # ignore comments and empty lines
            if not line or line.startswith("#"):
                continue

            toks = line.split(".")
            if len(toks) != 4:
                print(
                    "error: %s in line %d has invalid line format, expecting "
                    "NET.STA.LOC.CHA - 1 line per stream" % (listFile, lineNumber),
                    file=sys.stderr,
                )
                return []

            streams.append((toks[0], toks[1], toks[2], toks[3]))
    finally:
        # Bug fix: never close sys.stdin; close real files on every path.
        if f is not sys.stdin:
            f.close()

    return streams
+
+
def readStreamTimeList(listFile):
    """
    Read list of streams with time windows

    Parameters
    ----------
    listFile : str
        Path of the input list file ("-" reads from stdin), one line per
        stream, format: 2007-03-28 15:48;2007-03-28 16:18;NET.STA.LOC.CHA

    Returns
    -------
    list
        (tMin, tMax, net, sta, loc, cha) tuples; empty on any error.

    """
    streams = []

    try:
        if listFile == "-":
            f = sys.stdin
            listFile = "stdin"
        else:
            f = open(listFile, "r")
    except Exception:
        # Bug fix: the original called sys.stderr.write(..., file=sys.stderr),
        # which raised TypeError instead of reporting the problem.
        print("error: unable to open '{}'".format(listFile), file=sys.stderr)
        return []

    try:
        # 1-based line numbers for human-readable error messages.
        for lineNumber, line in enumerate(f, start=1):
            line = line.strip()
            # ignore comments and empty lines
            if not line or line.startswith("#"):
                continue

            toks = line.split(";")
            if len(toks) != 3:
                sys.stderr.write(
                    "%s:%d: error: invalid line format, expected 3 "
                    "items separated by ';'\n" % (listFile, lineNumber)
                )
                return []

            try:
                tMin = str2time(toks[0].strip())
            except BaseException:
                # BaseException on purpose: str2time exits via sys.exit()
                # (SystemExit) on an unparsable time string.
                sys.stderr.write(
                    "%s:%d: error: invalid time format (tmin)\n" % (listFile, lineNumber)
                )
                return []

            try:
                tMax = str2time(toks[1].strip())
            except BaseException:
                sys.stderr.write(
                    "%s:%d: error: invalid time format (tMax)\n" % (listFile, lineNumber)
                )
                return []

            nslc = toks[2].strip().split(".")
            if len(nslc) != 4:
                sys.stderr.write(
                    "%s:%d: error: invalid stream format\n" % (listFile, lineNumber)
                )
                return []

            streams.append((tMin, tMax, nslc[0], nslc[1], nslc[2], nslc[3]))
    finally:
        # Never close sys.stdin; close real files on every path.
        if f is not sys.stdin:
            f.close()

    return streams
+
+
# Help text printed by usage(); keep the options in sync with the
# gnu_getopt() specification below.
usage_info = """
Usage:
  scart [options] [archive]

Import miniSEED waveforms or dump records from an SDS structure, sort them,
modify the time and replay them. Also check files and archives.

Verbosity:
  -h, --help       Display this help message.
  -v, --verbose    Print verbose information.

Mode:
  --check arg      Check mode: Check all files in the given directory structure
                   for erroneous miniSEED records. If no directory is given,
                   $SEISCOMP_ROOT/var/lib/archive is scanned.
  -d, --dump       Export (dump) mode. Read from SDS archive.
  -I arg           Import mode: Specify a recordstream URL when in import mode.
                   When using another recordstream than file a
                   stream list file is needed.
                   Default: file://- (stdin)

Output:
  -c arg           Channel filter (regular expression).
                   Default: "(B|E|H|M|S)(D|H|L|N)(E|F|N|Z|1|2|3)"
  -E               Sort according to record end time; default is start time
  --files arg      Specify the file handles to cache; default: 100
  -l, --list arg   Use a stream list file instead of defined networks and
                   channels (-n and -c are ignored). The list can be generated
                   from events by scevtstreams. One line per stream
                   Line format: starttime;endtime;streamID
                       2007-03-28 15:48;2007-03-28 16:18;GE.LAST.*.*
                       2007-03-28 15:48;2007-03-28 16:18;GE.PMBI..BH?
  -m, --modify     Modify the record time for realtime playback when dumping.
  -n arg           Network code list (comma separated). Default: *
  --nslc arg       Use a stream list file for filtering the data by the given
                   streams. For dump mode only! One line per stream.
                   Format: NET.STA.LOC.CHA
  -s, --sort       Sort records.
  --speed arg      Specify the speed to dump the records. A value of 0 means
                   no delay. Otherwise speed is a multiplier of the real time
                   difference between the records.
  --stdout         Writes to stdout if import mode is used instead
                   of creating a SDS archive.
  --print-streams  Print stream information only and exit. Works in import, dump and
                   check mode. Output: NET.STA.LOC.CHA StartTime EndTime.
  -t t1~t2         Specify time window (as one properly quoted string)
                   times are of course UTC and separated by a tilde '~' .
  --test           Test only, no record output.
  --with-filecheck Check all accessed files after import. Unsorted or
                   unreadable files are reported to stderr.
  --with-filename  Print all accessed files to stdout after import.

Examples:
Read from /archive, create a miniSEED file where records are sorted by end time
  scart -dsv -t '2007-03-28 15:48~2007-03-28 16:18' /archive > sorted.mseed

Import miniSEED data from file [your file], create a SDS archive
  scart -I file.mseed $SEISCOMP_ROOT/var/lib/archive

Import miniSEED data into a SDS archive, check all modified files for errors
  scart -I file.mseed --with-filecheck $SEISCOMP_ROOT/var/lib/archive

Check an archive for files with out-of order records
  scart --check /archive
"""
+
+
def usage(exitcode=0):
    """Write the command line help to stderr and terminate with *exitcode*."""
    print(usage_info, end="", file=sys.stderr)
    sys.exit(exitcode)
+
+
# Parse the command line; any unknown option aborts with the usage text.
try:
    opts, files = gnu_getopt(
        sys.argv[1:],
        "I:dsmEn:c:t:l:hv",
        [
            "stdout",
            "with-filename",
            "with-filecheck",
            "dump",
            "list=",
            "nslc=",
            "sort",
            "modify",
            "speed=",
            "files=",
            "verbose",
            "test",
            "help",
            "check",
            "print-streams",
        ],
    )
except GetoptError:
    usage(exitcode=1)
+
+
# Default option values, overwritten by the flag loop below.
tmin = None  # start of the requested time window
tmax = None  # end of the requested time window
endtime = False  # sort by record end time instead of start time
verbose = False
sort = False
modifyTime = False
dump = False
listFile = None  # stream list with time windows (-l/--list)
nslcFile = None  # stream list without time windows (--nslc)
printStreams = False
withFilename = False  # Whether to output accessed files for import or not
checkFiles = False  # Check if output files are sorted by time
checkSDS = False  # check the SDS archive for errors in files
test = False
filePoolSize = 100
# default = stdin
recordURL = "file://-"

speed = 0
stdout = False

channels = "(B|E|H|M|S)(D|H|L|N)(E|F|N|Z|1|2|3)"
networks = "*"

archiveDirectory = "./"
+
+
# Apply each parsed command line flag to the corresponding setting.
for flag, arg in opts:
    if flag == "-t":
        try:
            # Time window is given as one string: 'start~end'.
            tmin, tmax = list(map(str2time, arg.split("~")))
        except ValueError as e:
            print("error: {}".format(e), file=sys.stderr)
            print(
                "       Provide correct time interval: -t 'startTime~endtime'",
                file=sys.stderr,
            )
            sys.exit(1)

    elif flag == "-E":
        endtime = True
    elif flag in ["-h", "--help"]:
        usage(exitcode=0)
    elif flag in ["--check"]:
        checkSDS = True
    elif flag in ["--stdout"]:
        stdout = True
    elif flag in ["--with-filename"]:
        withFilename = True
    elif flag in ["--with-filecheck"]:
        checkFiles = True
    elif flag in ["-v", "--verbose"]:
        verbose = True
    elif flag in ["-d", "--dump"]:
        dump = True
    elif flag in ["-l", "--list"]:
        listFile = arg
    elif flag in ["--nslc"]:
        nslcFile = arg
    elif flag in ["--print-streams"]:
        printStreams = True
    elif flag in ["-s", "--sort"]:
        sort = True
    elif flag in ["-m", "--modify"]:
        modifyTime = True
    elif flag in ["--speed"]:
        speed = float(arg)
    elif flag in ["--files"]:
        filePoolSize = int(arg)
    elif flag in ["--test"]:
        test = True
    elif flag == "-I":
        recordURL = arg
    elif flag == "-n":
        networks = arg
    elif flag == "-c":
        channels = arg
    else:
        usage(exitcode=1)
+
+
# Resolve the archive directory: positional argument first, then
# $SEISCOMP_ROOT/var/lib/archive, then the current directory default.
if files:
    archiveDirectory = files[0]
else:
    try:
        archiveDirectory = os.environ["SEISCOMP_ROOT"] + "/var/lib/archive"
    except BaseException:
        pass

try:
    # Archive paths below are built by concatenation: ensure trailing slash.
    if archiveDirectory[-1] != "/":
        archiveDirectory = archiveDirectory + "/"
except BaseException:
    pass

if not stdout and not os.path.isdir(archiveDirectory):
    sys.stderr.write(
        "info: archive directory '%s' not found - stopping\n" % archiveDirectory
    )
    sys.exit(-1)

archive = Archive(archiveDirectory)
archive.filePoolSize = filePoolSize
+
if verbose:
    seiscomp.logging.enableConsoleLogging(seiscomp.logging.getAll())

# Bug fix: validate the time window whenever dumping without a stream list.
# Previously this whole check was nested under 'if verbose:', so running
# without -v skipped the validation entirely and a missing or inverted
# -t window only surfaced later as a crash.
if dump and not listFile:
    if not tmin or not tmax:
        print(
            "info: provide a time window with '-t' or '-l' when using "
            "'-d' - stopping",
            file=sys.stderr,
        )
        sys.exit(-1)
    if tmin >= tmax:
        print(
            "info: start time '{}' after end time '{}' - stopping".format(
                time2str(tmin), time2str(tmax)
            ),
            file=sys.stderr,
        )
        sys.exit(-1)

    if verbose:
        sys.stderr.write("Time window: %s~%s\n" % (time2str(tmin), time2str(tmax)))

if verbose:
    sys.stderr.write("Archive: %s\n" % archiveDirectory)
    if dump:
        if not sort and not modifyTime:
            sys.stderr.write("Mode: DUMP\n")
        elif sort and not modifyTime:
            sys.stderr.write("Mode: DUMP & SORT\n")
        elif not sort and modifyTime:
            sys.stderr.write("Mode: DUMP & MODIFY_TIME\n")
        elif sort and modifyTime:
            sys.stderr.write("Mode: DUMP & SORT & MODIFY_TIME\n")
    else:
        sys.stderr.write("Mode: IMPORT\n")
+
archiveIterator = ArchiveIterator(archive, endtime)

# --check overrides both dump and stdout output.
if checkSDS:
    dump = False
    stdout = False

# Dump mode always writes records to stdout.
if dump:
    stdout = True

if stdout:
    out = sys.stdout
    try:
        # needed in Python 3, fails in Python 2: write raw bytes
        out = out.buffer
    except AttributeError:
        # assuming this is Python 2, nothing to be done
        pass

# list file with times takes priority over nslc list
if listFile:
    nslcFile = None
+
# Per-stream statistics for --print-streams:
# streamID -> (startISO, endISO, recordCount, sampleCount, samplingRate)
streamDict = {}
if dump:
    if listFile:
        print("Stream file: '{}'".format(listFile), file=sys.stderr)
        streams = readStreamTimeList(listFile)
        for stream in streams:
            # Skip entries whose window is empty or inverted.
            if stream[0] >= stream[1]:
                print(
                    "info: ignoring {}.{}.{}.{} - start {} after end {}".format(
                        stream[2], stream[3], stream[4], stream[5], stream[0], stream[1]
                    ),
                    file=sys.stderr,
                )
                continue

            if verbose:
                print(
                    "Adding stream to list: {}.{}.{}.{} {} - {}".format(
                        stream[2], stream[3], stream[4], stream[5], stream[0], stream[1]
                    ),
                    file=sys.stderr,
                )
            archiveIterator.append(
                stream[0], stream[1], stream[2], stream[3], stream[4], stream[5]
            )

    elif nslcFile:
        print("Stream file: '{}'".format(nslcFile), file=sys.stderr)
        streams = readStreamList(nslcFile)
        for stream in streams:
            if verbose:
                print(
                    "Adding stream to list: {}.{}.{}.{} {} - {}".format(
                        stream[0], stream[1], stream[2], stream[3], tmin, tmax
                    ),
                    file=sys.stderr,
                )
            archiveIterator.append(
                tmin, tmax, stream[0], stream[1], stream[2], stream[3]
            )

    else:
        # No stream list: use the -n / -c filters on the whole archive.
        if networks == "*":
            archiveIterator.append(tmin, tmax, "*", "*", "*", channels)
        else:
            items = networks.split(",")
            for n in items:
                n = n.strip()
                archiveIterator.append(tmin, tmax, n, "*", "*", channels)

    stime = None
    realTime = seiscomp.core.Time.GMT()

    if sort:
        records = Sorter(archiveIterator)
    else:
        records = Copy(archiveIterator)

    foundRecords = 0
    for rec in records:
        # skip corrupt records
        etime = seiscomp.core.Time(rec.endTime())

        if stime is None:
            stime = etime
            if verbose:
                sys.stderr.write("First record: %s\n" % stime.iso())

        dt = etime - stime

        now = seiscomp.core.Time.GMT()

        # --speed throttles playback relative to the first record.
        if speed > 0:
            playTime = (realTime + dt).toDouble() / speed
        else:
            playTime = now.toDouble()

        sleepTime = playTime - now.toDouble()
        if sleepTime > 0:
            time.sleep(sleepTime)

        if modifyTime:
            # Shift the record so that it "ends now" for realtime playback.
            recLength = etime - rec.startTime()
            rec.setStartTime(seiscomp.core.Time(playTime) - recLength)

        if verbose:
            etime = rec.endTime()
            print(
                "{} time current: {} start: {} end: {}".format(
                    rec.streamID(),
                    seiscomp.core.Time.LocalTime().iso(),
                    rec.startTime().iso(),
                    etime.iso(),
                ),
                file=sys.stderr,
            )

        if printStreams:
            stream = f"{rec.networkCode()}.{rec.stationCode()}.{rec.locationCode()}.{rec.channelCode()}"
            recStart = rec.startTime()
            recEnd = rec.endTime()

            # NOTE(review): records that extend neither the start nor the
            # end of the known window are not re-counted here — confirm
            # this undercount of records/samples is intended.
            if stream in streamDict:
                streamStart = streamDict[stream][0]
                streamEnd = streamDict[stream][1]
                streamNRec = streamDict[stream][2]
                streamNSamp = streamDict[stream][3]
                if recStart.valid() and recStart.iso() < streamStart:
                    # update start time
                    streamDict.update(
                        {
                            stream: (
                                recStart.iso(),
                                streamEnd,
                                streamNRec + 1,
                                streamNSamp + rec.data().size(),
                                rec.samplingFrequency()
                            )
                        }
                    )
                if recEnd.valid() and recEnd.iso() > streamEnd:
                    # update end time
                    streamDict.update(
                        {
                            stream: (
                                streamStart,
                                recEnd.iso(),
                                streamNRec + 1,
                                streamNSamp + rec.data().size(),
                                rec.samplingFrequency()
                            )
                        }
                    )
            else:
                # add stream for the first time
                streamDict[stream] = (
                    recStart.iso(),
                    recEnd.iso(),
                    1,
                    rec.data().size(),
                    rec.samplingFrequency()
                )

        if not test and not printStreams:
            # Write the raw miniSEED bytes unmodified.
            out.write(rec.raw().str())

        foundRecords += 1

    if verbose:
        print("Found records: {}".format(foundRecords), file=sys.stderr)

    if test:
        print("Test mode: no records written", file=sys.stderr)
+
elif checkSDS:
    # --check: walk the whole archive tree and verify every file.
    foundIssues = 0
    checkedFiles = 0
    for path, subdirs, files in os.walk(archiveDirectory):
        for name in files:
            fileName = os.path.join(path, name)
            checkedFiles += 1

            if printStreams:
                # only collect stream IDs
                checkFilePrint(fileName, streamDict)
                continue

            issueFound = checkFile(fileName)
            if issueFound:
                foundIssues += 1
                print("{} has an issue".format(fileName), file=sys.stderr)
                print("  + " + issueFound, file=sys.stderr)

    if not printStreams:
        print(
            "Found issues in {}/{} files".format(foundIssues, checkedFiles),
            file=sys.stderr,
        )
+
else:
    # Import mode: read records from recordURL and write them into the
    # SDS archive (or to stdout with --stdout).
    env = seiscomp.system.Environment.Instance()
    cfg = seiscomp.config.Config()
    env.initConfig(cfg, "scart")
    try:
        # Load configured recordstream plugins; best effort only.
        plugins = cfg.getStrings("plugins")
        registry = seiscomp.system.PluginRegistry.Instance()
        for p in plugins:
            registry.addPluginName(p)
        registry.loadPlugins()
    except Exception:
        pass

    rs = seiscomp.io.RecordStream.Open(recordURL)
    if rs is None:
        sys.stderr.write("Unable to open recordstream '%s'\n" % recordURL)
        sys.exit(-1)

    if not rs.setRecordType("mseed"):
        sys.stderr.write(
            "Format 'mseed' is not supported by recordstream '%s'\n" % recordURL
        )
        sys.exit(-1)

    if not isFile(recordURL):
        # Non-file sources need explicit stream subscriptions.
        if not listFile:
            sys.stderr.write(
                "A stream list is needed to fetch data from another source than a file\n"
            )
            sys.exit(-1)

        streams = readStreamTimeList(listFile)
        for stream in streams:
            # Add stream to recordstream
            if not rs.addStream(
                stream[2], stream[3], stream[4], stream[5], stream[0], stream[1]
            ):
                if verbose:
                    sys.stderr.write(
                        "error: adding stream: %s %s %s.%s.%s.%s\n"
                        % (
                            stream[0],
                            stream[1],
                            stream[2],
                            stream[3],
                            stream[4],
                            stream[5],
                        )
                    )
            else:
                if verbose:
                    sys.stderr.write(
                        "adding stream: %s %s %s.%s.%s.%s\n"
                        % (
                            stream[0],
                            stream[1],
                            stream[2],
                            stream[3],
                            stream[4],
                            stream[5],
                        )
                    )

    input = seiscomp.io.RecordInput(
        rs, seiscomp.core.Array.INT, seiscomp.core.Record.SAVE_RAW
    )
    # Open output file handles keyed by archive-relative path.
    filePool = dict()
    f = None
    accessedFiles = set()
    try:
        for rec in input:
            if printStreams:
                stream = f"{rec.networkCode()}.{rec.stationCode()}.{rec.locationCode()}.{rec.channelCode()}"
                recStart = rec.startTime()
                recEnd = rec.endTime()

                # NOTE(review): records that extend neither boundary are
                # not re-counted here — confirm the undercount is intended.
                if stream in streamDict:
                    streamStart = streamDict[stream][0]
                    streamEnd = streamDict[stream][1]
                    streamNRec = streamDict[stream][2]
                    streamNSamp = streamDict[stream][3]
                    if recStart.valid() and recStart.iso() < streamStart:
                        # update start time
                        streamDict.update(
                            {
                                stream: (
                                    recStart.iso(),
                                    streamEnd,
                                    streamNRec + 1,
                                    streamNSamp + rec.data().size(),
                                    rec.samplingFrequency()
                                )
                            }
                        )
                    if recEnd.valid() and recEnd.iso() > streamEnd:
                        # update end time
                        streamDict.update(
                            {
                                stream: (
                                    streamStart,
                                    recEnd.iso(),
                                    streamNRec + 1,
                                    streamNSamp + rec.data().size(),
                                    rec.samplingFrequency()
                                )
                            }
                        )
                else:
                    # add stream for the first time
                    streamDict[stream] = (
                        recStart.iso(),
                        recEnd.iso(),
                        1,
                        rec.data().size(),
                        rec.samplingFrequency()
                    )

                continue

            if stdout:
                out.write(rec.raw().str())
                continue

            # Determine the SDS day file for this record.
            dir, file = archive.location(
                rec.startTime(),
                rec.networkCode(),
                rec.stationCode(),
                rec.locationCode(),
                rec.channelCode(),
            )
            file = dir + file

            if not test:
                try:
                    f = filePool[file]
                except BaseException:
                    outdir = "/".join((archiveDirectory + file).split("/")[:-1])
                    if not create_dir(outdir):
                        sys.stderr.write("Could not create directory '%s'\n" % outdir)
                        sys.exit(-1)

                    try:
                        # Append: existing day files are extended.
                        f = open(archiveDirectory + file, "ab")
                    except BaseException:
                        sys.stderr.write(
                            "File '%s' could not be opened for writing\n"
                            % (archiveDirectory + file)
                        )
                        sys.exit(-1)

                    # Cache the handle while the pool has room; with a full
                    # pool the handle is simply not cached.
                    if len(filePool) < filePoolSize:
                        filePool[file] = f

                if withFilename or checkFiles:
                    accessedFiles.add(archiveDirectory + file)
                f.write(rec.raw().str())
            else:
                if withFilename or checkFiles:
                    accessedFiles.add(archiveDirectory + file)

            if verbose:
                sys.stderr.write(
                    "%s %s %s\n" % (rec.streamID(), rec.startTime().iso(), file)
                )
    except Exception as e:
        sys.stderr.write("Exception: %s\n" % str(e))

    if checkFiles:
        # --with-filecheck: verify every file touched during the import.
        print("Testing accessed files (may take some time):", file=sys.stderr)
        foundIssues = 0
        checkedFiles = 0
        for fileName in accessedFiles:
            checkedFiles += 1
            issueFound = checkFile(fileName)
            if issueFound:
                foundIssues += 1
                print("{} has an issue".format(fileName), file=sys.stderr)
                print("  + " + issueFound, file=sys.stderr)

        print(
            "Found issues in {}/{} files".format(foundIssues, checkedFiles),
            file=sys.stderr,
        )

    if withFilename:
        # --with-filename: report every file touched during the import.
        if verbose:
            print("List of accessed files:", file=sys.stderr)
        for fileName in accessedFiles:
            print(fileName, file=sys.stdout)
+
# --print-streams: emit the collected per-stream statistics as a table.
if len(streamDict) > 0:
    print(
        "# streamID       start                       end                         records samples samplingRate",
        file=sys.stdout,
    )
    for key, (start, end, nRecs, nSamples, sps) in sorted(streamDict.items()):
        print(
            f"{key: <{16}} {start: <{27}} {end: <{27}} {nRecs} {nSamples} {sps}",
            file=sys.stdout,
        )
diff --git a/bin/scautoloc b/bin/scautoloc
new file mode 100755
index 0000000..14f796c
Binary files /dev/null and b/bin/scautoloc differ
diff --git a/bin/scautopick b/bin/scautopick
new file mode 100755
index 0000000..8a1a977
Binary files /dev/null and b/bin/scautopick differ
diff --git a/bin/scbulletin b/bin/scbulletin
new file mode 100755
index 0000000..e343a81
--- /dev/null
+++ b/bin/scbulletin
@@ -0,0 +1,19 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import seiscomp.scbulletin
+
# Thin launcher: the actual implementation lives in seiscomp.scbulletin.
if __name__ == "__main__":
    seiscomp.scbulletin.main()
diff --git a/bin/scchkcfg b/bin/scchkcfg
new file mode 100755
index 0000000..62339a1
Binary files /dev/null and b/bin/scchkcfg differ
diff --git a/bin/sccnv b/bin/sccnv
new file mode 100755
index 0000000..a7fb8ab
Binary files /dev/null and b/bin/sccnv differ
diff --git a/bin/scconfig b/bin/scconfig
new file mode 100755
index 0000000..de32e7f
Binary files /dev/null and b/bin/scconfig differ
diff --git a/bin/scdb b/bin/scdb
new file mode 100755
index 0000000..b73a533
Binary files /dev/null and b/bin/scdb differ
diff --git a/bin/scdbstrip b/bin/scdbstrip
new file mode 100755
index 0000000..133248b
--- /dev/null
+++ b/bin/scdbstrip
@@ -0,0 +1,1042 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+from __future__ import absolute_import, division, print_function
+
+import re
+import sys
+import traceback
+import seiscomp.core
+import seiscomp.client
+import seiscomp.logging
+import seiscomp.utils
+
+output = sys.stdout
+error = sys.stderr
+
+
+class RuntimeException(Exception):
+    """Generic error raised by this script, e.g. when a DB command fails."""
+
+    def __init__(self, what):
+        # what: human-readable description of the failure
+        self.what = what
+
+    def __str__(self):
+        return str(self.what)
+
+
+class ExitRequestException(RuntimeException):
+    """Raised when the application requested an exit mid-operation."""
+
+    def __init__(self):
+        # Deliberately skips RuntimeException.__init__; __str__ is fixed.
+        pass
+
+    def __str__(self):
+        return "exit requested"
+
+
+class QueryInterface:
+    """Base interface for building backend-specific SQL used by the cleaner.
+
+    Concrete subclasses (MySQLDB, PostgresDB) generate the DELETE/COUNT
+    statements; the base implementation returns empty strings/lists.
+    """
+
+    def __init__(self, database):
+        # database: SeisComP database interface used for queries and for
+        # converting column names to the backend's convention
+        self._database = database
+
+    def cnvCol(self, col):
+        """Convert a column name to the backend's naming convention."""
+        return self._database.convertColumnName(col)
+
+    def getTables(self):
+        """Return the list of table names in the database (none here)."""
+        return []
+
+    def deleteObjectQuery(self, *v):
+        return ""
+
+    def deleteJournalQuery(self, *v):
+        return ""
+
+    def childQuery(self, mode, *v):
+        return ""
+
+    def childJournalQuery(self, mode, *v):
+        return ""
+
+
+class MySQLDB(QueryInterface):
+    """Query builder for MySQL, using its multi-table DELETE syntax
+    ("DELETE <t> FROM <t1>, <t2> WHERE ...")."""
+
+    def __init__(self, database):
+        QueryInterface.__init__(self, database)
+
+    def getTables(self):
+        """Return all table names of the current database."""
+        tmp_tables = []
+        if not self._database.beginQuery("show tables"):
+            return tmp_tables
+
+        while self._database.fetchRow():
+            tmp_tables.append(self._database.getRowFieldString(0))
+
+        self._database.endQuery()
+        return tmp_tables
+
+    def deleteObjectQuery(self, *v):
+        """Build a DELETE over the tables in *v* joined pairwise on _oid.
+
+        If v[0] is truthy its rows are deleted and the query ends with
+        " and " (caller appends a condition); otherwise v[1] rows are
+        deleted and the query ends with " where " or a complete join chain.
+        """
+        if v[0]:
+            q = "delete " + v[0] + " from " + ", ".join(v) + " where " + \
+                v[0] + "._oid=" + v[1] + "._oid and "
+        else:
+            q = "delete " + v[1] + " from " + ", ".join(v[1:]) + " where "
+
+        for i in range(1, len(v)-1):
+            if i > 1:
+                q += " and "
+            q += v[i] + "._oid=" + v[i+1] + "._oid"
+
+        return q
+
+    def deleteJournalQuery(self, *v):
+        """Build a DELETE removing JournalEntry rows whose objectID matches
+        the PublicObject joined through the tables in *v*."""
+        q = "delete JournalEntry from JournalEntry, " + ", ".join(v) + " where " + \
+            v[0] + "._oid=" + v[1] + "._oid"
+
+        for i in range(1, len(v)-1):
+            q += " and " + v[i] + "._oid=" + v[i+1] + "._oid"
+
+        q += " and JournalEntry.objectID=PublicObject.publicID"
+
+        return q
+
+    def childQuery(self, mode, *v):
+        """Build a DELETE or COUNT over child rows linked via _parent_oid.
+
+        mode: "delete" or "count"; any other value yields "".
+        """
+        if v[0]:
+            if mode == "delete":
+                q = "delete " + v[0]
+            elif mode == "count":
+                q = "select count(*)"
+            else:
+                return ""
+
+            q += " from " + ", ".join(v) + " where " + \
+                v[0] + "._oid=" + v[1] + "._oid and "
+        else:
+            if mode == "delete":
+                q = "delete " + v[1]
+            elif mode == "count":
+                q = "select count(*)"
+            else:
+                return ""
+
+            q += " from " + ", ".join(v[1:]) + " where "
+
+        for i in range(1, len(v)-1):
+            if i > 1:
+                q += " and "
+            q += v[i] + "._parent_oid=" + v[i+1] + "._oid"
+
+        return q
+
+    def childJournalQuery(self, mode, *v):
+        """Like childQuery() but targets JournalEntry rows of the children.
+
+        NOTE(review): in the v[0]-falsy "delete" branch the statement
+        deletes rows of v[1] rather than JournalEntry; that branch is never
+        taken in this file (callers pass "PublicObject" as v[0]) -- confirm
+        before reusing it elsewhere.
+        """
+        if v[0]:
+            if mode == "delete":
+                q = "delete JournalEntry"
+            elif mode == "count":
+                q = "select count(*)"
+            else:
+                return ""
+
+            q += " from JournalEntry, " + ", ".join(v) + " where " + \
+                v[0] + "._oid=" + v[1] + "._oid and "
+        else:
+            if mode == "delete":
+                q = "delete " + v[1]
+            elif mode == "count":
+                q = "select count(*)"
+            else:
+                return ""
+
+            q += " from JournalEntry, " + ", ".join(v[1:]) + " where "
+
+        for i in range(1, len(v)-1):
+            if i > 1:
+                q += " and "
+            q += v[i] + "._parent_oid=" + v[i+1] + "._oid"
+
+        q += " and JournalEntry.objectID=PublicObject.publicID"
+        return q
+
+
+class PostgresDB(QueryInterface):
+    """Query builder for PostgreSQL, using its DELETE ... USING syntax and
+    converted (lower-cased) column names where required."""
+
+    def __init__(self, database):
+        QueryInterface.__init__(self, database)
+
+    def getTables(self):
+        """Return all base-table names outside the system schemas."""
+        tmp_tables = []
+        if not self._database.beginQuery("SELECT table_name FROM information_schema.tables WHERE table_type = 'BASE TABLE' AND table_schema NOT IN ('pg_catalog', 'information_schema');"):
+            return tmp_tables
+
+        while self._database.fetchRow():
+            tmp_tables.append(self._database.getRowFieldString(0))
+
+        self._database.endQuery()
+        return tmp_tables
+
+    def deleteObjectQuery(self, *v):
+        """Build a DELETE over the tables in *v* joined pairwise on _oid.
+
+        Mirrors MySQLDB.deleteObjectQuery but with DELETE ... USING.
+        """
+        if v[0]:
+            q = "delete from " + v[0] + " using " + ", ".join(v[1:]) + " where " + \
+                v[0] + "._oid=" + v[1] + "._oid and "
+        else:
+            q = "delete from " + v[1] + " using " + \
+                ", ".join(v[2:]) + " where "
+
+        for i in range(1, len(v)-1):
+            if i > 1:
+                q += " and "
+            q += v[i] + "._oid=" + v[i+1] + "._oid"
+
+        return q
+
+    def deleteJournalQuery(self, *v):
+        """Build a DELETE removing JournalEntry rows whose objectID matches
+        the PublicObject joined through the tables in *v*."""
+        q = "delete from JournalEntry using " + ", ".join(v) + " where " + \
+            v[0] + "._oid=" + v[1] + "._oid"
+
+        for i in range(1, len(v)-1):
+            q += " and " + v[i] + "._oid=" + v[i+1] + "._oid"
+
+        q += " and JournalEntry." + \
+            self.cnvCol("objectID") + "=PublicObject." + \
+            self.cnvCol("publicID")
+
+        return q
+
+    def childQuery(self, mode, *v):
+        """Build a DELETE or COUNT over child rows linked via _parent_oid.
+
+        mode: "delete" or "count"; any other value yields "".
+        """
+        if v[0]:
+            if mode == "delete":
+                q = "delete from " + v[0] + " using " + ", ".join(v[1:])
+            elif mode == "count":
+                q = "select count(*) from " + ", ".join(v)
+            else:
+                return ""
+
+            q += " where " + \
+                v[0] + "._oid=" + v[1] + "._oid and "
+        else:
+            if mode == "delete":
+                q = "delete from " + v[1] + " using " + ", ".join(v[2:])
+            elif mode == "count":
+                q = "select count(*) from " + ", ".join(v[1:])
+            else:
+                return ""
+
+            q += " where "
+
+        for i in range(1, len(v)-1):
+            if i > 1:
+                q += " and "
+            q += v[i] + "._parent_oid=" + v[i+1] + "._oid"
+
+        return q
+
+    def childJournalQuery(self, mode, *v):
+        """Like childQuery() but targets JournalEntry rows of the children.
+
+        NOTE(review): in the v[0]-falsy "delete" branch the statement
+        deletes rows of v[1] rather than JournalEntry; that branch is never
+        taken in this file (callers pass "PublicObject" as v[0]) -- confirm
+        before reusing it elsewhere.
+        """
+        if v[0]:
+            if mode == "delete":
+                q = "delete from JournalEntry using "
+            elif mode == "count":
+                q = "select count(*) from "
+            else:
+                return ""
+
+            q += ", ".join(v) + " where " + \
+                v[0] + "._oid=" + v[1] + "._oid and "
+        else:
+            if mode == "delete":
+                q = "delete from " + v[1] + " using "
+            elif mode == "count":
+                q = "select count(*) from "
+            else:
+                return ""
+
+            q += " JournalEntry, " + ", ".join(v[1:]) + " where "
+
+        for i in range(1, len(v)-1):
+            if i > 1:
+                q += " and "
+            q += v[i] + "._parent_oid=" + v[i+1] + "._oid"
+
+        q += " and JournalEntry." + \
+            self.cnvCol("objectID") + "=PublicObject." + \
+            self.cnvCol("publicID")
+        return q
+
+
+class DBCleaner(seiscomp.client.Application):
+    def __init__(self, argc, argv):
+        """Set up a stand-alone, database-only SeisComP application."""
+        seiscomp.client.Application.__init__(self, argc, argv)
+
+        # No messaging, no daemon mode: this is a one-shot maintenance tool.
+        self.setMessagingEnabled(False)
+        self.setDatabaseEnabled(True, True)
+        self.setDaemonEnabled(False)
+
+        # Retention span; the components are summed in check()/clean().
+        self._daysToKeep = None
+        self._hoursToKeep = None
+        self._minutesToKeep = None
+        self._datetime = None      # explicit cutoff; overrides the span above
+        self._invertMode = False   # True: delete objects AFTER the cutoff
+        self._stripEP = True       # strip event parameters
+        self._stripQC = True       # strip waveform quality-control entries
+
+        # Progress bookkeeping for beginMessage()/endMessage().
+        self._steps = 0
+        self._currentStep = 0
+        self._keepEvents = []      # event publicIDs excluded from deletion
+
+        self._timer = seiscomp.utils.StopWatch()
+
+ def createCommandLineDescription(self):
+ try:
+ try:
+ self.commandline().addGroup("Mode")
+ self.commandline().addOption("Mode", "check", "Checks if "
+ "unreachable objects exist.")
+ self.commandline().addOption("Mode", "clean-unused",
+ "Remove all unreachable objects "
+ "when in checkmode. Default: off.")
+
+ self.commandline().addGroup("Objects")
+
+ self.commandline().addOption("Objects", "ep-only,E",
+ "Strip only event parameters"
+ " but no waveform QC.")
+ self.commandline().addStringOption("Objects", "keep-events",
+ "Event-IDs to keep in the "
+ "database. Combining with"
+ "'qc-only' is invalld.")
+ self.commandline().addOption("Objects", "qc-only,Q",
+ "Strip only waveform QC but no "
+ "event parameters. Combining with"
+ "'ep-only' is invalld.")
+
+ self.commandline().addGroup("Timespan")
+ self.commandline().addStringOption("Timespan", "datetime",
+ "Specify the datetime (UTC)"
+ " from which to keep all "
+ "events. If given, days, "
+ "minutes and hours are ignored. "
+ "Format: '%Y-%m-%d %H:%M:%S'.")
+ self.commandline().addIntOption("Timespan", "days",
+ "The number of days to keep. "
+ "Added to hours and minutes. "
+ "Default is 30 if no other "
+ "times are given.")
+ self.commandline().addIntOption("Timespan", "hours",
+ "The number of hours to keep. "
+ "Added to days and minutes.")
+ self.commandline().addIntOption("Timespan", "minutes",
+ "The number of minutes to keep. "
+ "Added to days and hours.")
+ self.commandline().addOption("Timespan", "invert,i",
+ "Delete all parameters after the "
+ "specified time period. If not "
+ "given, parameter from before are"
+ " deleted.")
+
+ except RuntimeError:
+ seiscomp.logging.warning(
+ "caught unexpected error %s" % sys.exc_info())
+ return True
+ except RuntimeError:
+ info = traceback.format_exception(*sys.exc_info())
+ for i in info:
+ sys.stderr.write(i)
+ sys.exit(-1)
+
+    def initConfiguration(self):
+        """Read the database.cleanup.* configuration values.
+
+        Missing parameters keep the defaults set in __init__; unexpected
+        errors print a traceback and exit with -1.
+        """
+        try:
+            if not seiscomp.client.Application.initConfiguration(self):
+                return False
+            try:
+                self._invertMode = self.configGetBool(
+                    "database.cleanup.invertMode")
+            except RuntimeError:
+                # parameter not configured; keep default
+                pass
+
+            try:
+                if self.configGetBool("database.cleanup.eventParameters"):
+                    self._stripEP = True
+                else:
+                    self._stripEP = False
+            except RuntimeError:
+                pass
+
+            try:
+                if self.configGetBool("database.cleanup.qualityControl"):
+                    self._stripQC = True
+                else:
+                    self._stripQC = False
+            except RuntimeError:
+                pass
+
+            try:
+                self._daysToKeep = self.configGetInt(
+                    "database.cleanup.keep.days")
+            except RuntimeError:
+                pass
+
+            try:
+                self._hoursToKeep = self.configGetInt(
+                    "database.cleanup.keep.hours")
+            except RuntimeError:
+                pass
+
+            try:
+                self._minutesToKeep = self.configGetInt(
+                    "database.cleanup.keep.minutes")
+            except RuntimeError:
+                pass
+
+            return True
+
+        except RuntimeError:
+            info = traceback.format_exception(*sys.exc_info())
+            for i in info:
+                sys.stderr.write(i)
+            sys.exit(-1)
+
+
+ def printUsage(self):
+
+ print('''Usage:
+ scbstrip [options]
+
+Remove event and waveform quality parameters from the database in a timespan.''')
+
+ seiscomp.client.Application.printUsage(self)
+
+ print('''Examples:
+Remove all event and waveform QC paramters older than 30 days
+ scdbstrip -d mysql://sysop:sysop@localhost/seiscomp --days 30
+''')
+
+ def validateParameters(self):
+ if not seiscomp.client.Application.validateParameters(self):
+ return False
+
+ try:
+ try:
+ self._daysToKeep = self.commandline().optionInt("days")
+ except RuntimeError:
+ pass
+
+ try:
+ self._hoursToKeep = self.commandline().optionInt("hours")
+ except RuntimeError:
+ pass
+
+ try:
+ self._minutesToKeep = self.commandline().optionInt("minutes")
+ except RuntimeError:
+ pass
+
+ if self.commandline().hasOption("invert"):
+ self._invertMode = True
+
+ epOnly = False
+ if self.commandline().hasOption("ep-only"):
+ self._stripEP = True
+ self._stripQC = False
+ epOnly = True
+
+ if self.commandline().hasOption("qc-only"):
+ if epOnly:
+ error.write("ERROR: Option '--qc-only' conflicts with "
+ "'--ep-only'\n")
+ return False
+ else:
+ self._stripEP = False
+ self._stripQC = True
+
+ if not self._stripEP and not self._stripQC:
+ error.write("[INFO] Event and QC parameters are disregarded by"
+ " configuration\n")
+ return False
+ try:
+ eventIDs = self.commandline().optionString("keep-events")
+ self._keepEvents = [id.strip() for id in eventIDs.split(',')]
+ except RuntimeError:
+ pass
+
+ try:
+ dateTime = self.commandline().optionString("datetime")
+ except RuntimeError:
+ dateTime = None
+
+ if dateTime:
+ self._daysToKeep = None
+ self._hoursToKeep = None
+ self._minutesToKeep = None
+
+ date = seiscomp.core.Time()
+ try:
+ if date.fromString(dateTime, "%Y-%m-%d %H:%M:%S"):
+ error.write("Using datetime option: %s\n" %
+ date.toString("%Y-%m-%d %H:%M:%S"))
+ self._datetime = date
+ else:
+ error.write("ERROR: datetime has wrong format\n")
+ return False
+ except ValueError:
+ pass
+
+ # fall back to default if no times are given
+ if (self._daysToKeep is None and dateTime is None and
+ self._hoursToKeep is None and self._minutesToKeep is None):
+ self._daysToKeep = 30
+
+ return True
+
+ except RuntimeError:
+ info = traceback.format_exception(*sys.exc_info())
+ for i in info:
+ sys.stderr.write(i)
+ sys.exit(-1)
+
+    def run(self):
+        """Select the backend query builder and run check() or clean().
+
+        Returns False for unsupported database backends; only MySQL and
+        PostgreSQL interfaces are recognized.
+        """
+        classname = self.database().className()
+        if re.search('postgres', classname, re.IGNORECASE):
+            self._query = PostgresDB(self.database())
+        elif re.search('mysql', classname, re.IGNORECASE):
+            self._query = MySQLDB(self.database())
+        else:
+            # NOTE(review): this error message goes to stdout although the
+            # module defines an 'error' stream for stderr -- confirm intent
+            output.write(
+                "Error: Database interface %s is not supported\n" % (classname))
+            output.flush()
+            return False
+
+        try:
+            self._timer.restart()
+
+            if self.commandline().hasOption("check"):
+                return self.check()
+
+            return self.clean()
+        except RuntimeError:
+            info = traceback.format_exception(*sys.exc_info())
+            for i in info:
+                sys.stderr.write(i)
+            sys.exit(-1)
+
+    def checkTable(self, table):
+        """Mark every tmp_object row as used whose _oid appears in *table*."""
+        self.runCommand(
+            "update tmp_object set used=1 where _oid in (select _oid from %s)"
+            % table)
+
+    def check(self):
+        """Report (and optionally remove) objects unreachable from any table.
+
+        Builds a temporary table of Object rows older than the cutoff, marks
+        every _oid referenced by any data table as used, and counts the rest.
+        With --clean-unused the unused rows are deleted.
+
+        NOTE(review): the relative cutoff uses LocalTime here while clean()
+        uses GMT -- confirm this difference is intended.
+        """
+        try:
+            if self._datetime is None:
+                timeSpan = seiscomp.core.TimeSpan(0)
+                if self._daysToKeep:
+                    timeSpan += seiscomp.core.TimeSpan(self._daysToKeep*24*3600)
+
+                if self._hoursToKeep:
+                    timeSpan += seiscomp.core.TimeSpan(self._hoursToKeep*3600)
+
+                if self._minutesToKeep:
+                    timeSpan += seiscomp.core.TimeSpan(self._minutesToKeep*60)
+
+                # All times are given in localtime
+                timestamp = seiscomp.core.Time.LocalTime() - timeSpan
+            else:
+                timestamp = self._datetime
+
+            output.write("[INFO] Check objects older than %s\n" %
+                         timestamp.toString("%Y-%m-%d %H:%M:%S"))
+
+            tables = self._query.getTables()
+            if len(tables) == 0:
+                return False
+
+            # Bookkeeping tables are not data tables; exclude them (both
+            # MySQL and lower-cased PostgreSQL spellings).
+            if "Object" in tables:
+                tables.remove("Object")
+            if "object" in tables:
+                tables.remove("object")
+            if "PublicObject" in tables:
+                tables.remove("PublicObject")
+            if "publicobject" in tables:
+                tables.remove("publicobject")
+            if "Meta" in tables:
+                tables.remove("Meta")
+            if "meta" in tables:
+                tables.remove("meta")
+
+            self._steps = len(tables) + 1
+
+            if self.commandline().hasOption("clean-unused"):
+                self._steps = self._steps + 1
+
+            # Skip the first 5 objects id' that are reserved for metaobjects
+            # (Config, QualityControl, inventory, EventParameters, routing)
+            tmp_object = "\
+            create temporary table tmp_object as \
+            select _oid, 0 as used from Object where _oid > 5 and _timestamp < '%s'\
+            " % timestamp.toString("%Y-%m-%d %H:%M:%S")
+
+            self.beginMessage("Search objects")
+            if not self.runCommand(tmp_object):
+                return False
+            self.endMessage(self.globalCount("tmp_object"))
+
+            for table in tables:
+                self.beginMessage("Check table %s" % table)
+                self.checkTable(table)
+                self.endMessage(self.usedCount("tmp_object"))
+
+            unusedObjects = self.unusedCount("tmp_object")
+
+            if self.commandline().hasOption("clean-unused"):
+                self.delete("Remove unreachable objects",
+                            self.deleteUnusedRawObjects, "tmp_object")
+
+            self.beginMessage("%d unused objects found" % unusedObjects)
+            if not self.runCommand("drop table tmp_object"):
+                return False
+            self.endMessage()
+
+            return True
+
+        except RuntimeException as e:
+            error.write("\nException: %s\n" % str(e))
+            return False
+
+        except:
+            # NOTE(review): bare except also catches SystemExit and
+            # KeyboardInterrupt; it re-reports and exits -1
+            info = traceback.format_exception(*sys.exc_info())
+            for i in info:
+                sys.stderr.write(i)
+            sys.exit(-1)
+
+    def clean(self):
+        """Delete expired QC and event-parameter objects from the database.
+
+        The cutoff is self._datetime if set, otherwise now (UTC) minus the
+        configured retention span. With self._invertMode the comparison is
+        inverted and objects after the cutoff are deleted instead. Event IDs
+        listed in self._keepEvents are preserved.
+        """
+        try:
+            if self._datetime is None:
+                timeSpan = seiscomp.core.TimeSpan(0)
+                if self._daysToKeep:
+                    timeSpan += seiscomp.core.TimeSpan(self._daysToKeep*24*3600)
+
+                if self._hoursToKeep:
+                    timeSpan += seiscomp.core.TimeSpan(self._hoursToKeep*3600)
+
+                if self._minutesToKeep:
+                    timeSpan += seiscomp.core.TimeSpan(self._minutesToKeep*60)
+
+                # All times are given in GMT (UTC)
+                timestamp = seiscomp.core.Time.GMT() - timeSpan
+            else:
+                timestamp = self._datetime
+
+            if not self._invertMode:
+                output.write("[INFO] Keep objects after %s UTC\n" %
+                             timestamp.toString("%Y-%m-%d %H:%M:%S"))
+            else:
+                output.write("[INFO] Keep objects before %s UTC\n" %
+                             timestamp.toString("%Y-%m-%d %H:%M:%S"))
+
+            if len(self._keepEvents) > 0:
+                output.write("[INFO] Keep events in db: %s\n" %
+                             ",".join(self._keepEvents))
+
+            # Comparison operator applied to object times below.
+            op = '<'
+            if self._invertMode:
+                op = '>='
+
+            self._steps = 32
+
+            # treat QC entries
+            if self._stripQC:
+                # First remove the Object rows backing the QC entries, then
+                # the WaveformQuality rows themselves.
+                self.beginMessage("Deleting waveform quality parameters")
+                if not self.runCommand(
+                        self._query.deleteObjectQuery("Object", "WaveformQuality")
+                        + "WaveformQuality.%s %s '%s'" %
+                        (self.cnvCol("end"), op,
+                         timestamp.toString("%Y-%m-%d %H:%M:%S"))):
+                    return False
+                if not self.runCommand("delete from WaveformQuality where WaveformQuality.%s %s '%s'" % (self.cnvCol("end"), op, timestamp.toString("%Y-%m-%d %H:%M:%S"))):
+                    return False
+                self.endMessage()
+
+            if not self._stripEP:
+                return True
+
+            # treat event parameters
+            old_events = "\
+            create temporary table old_events as \
+            select Event._oid, PEvent.%s \
+            from Event, PublicObject as PEvent, Origin, PublicObject as POrigin \
+            where Event._oid=PEvent._oid and \
+                  Origin._oid=POrigin._oid and \
+                  Event.%s=POrigin.%s and \
+                  Origin.%s %s '%s'\
+            " % (self.cnvCol("publicID"), self.cnvCol("preferredOriginID"), self.cnvCol("publicID"), self.cnvCol("time_value"), op, timestamp.toString("%Y-%m-%d %H:%M:%S"))
+
+            if len(self._keepEvents) > 0:
+                old_events += " and PEvent." + \
+                    self.cnvCol("publicID") + \
+                    " not in ('%s')" % "','".join(self._keepEvents)
+
+            self.beginMessage("Find old events")
+            if not self.runCommand(old_events):
+                return False
+            self.endMessage(self.globalCount("old_events"))
+
+            # Delete OriginReferences of old events
+            self.delete("Delete origin references of old events",
+                        self.deleteChilds, "OriginReference", "old_events")
+
+            # Delete FocalMechanismReference of old events
+            self.delete("Delete focal mechanism references of old events",
+                        self.deleteChilds, "FocalMechanismReference",
+                        "old_events")
+
+            # Delete EventDescription of old events
+            self.delete("Delete event descriptions of old events",
+                        self.deleteChilds, "EventDescription", "old_events")
+
+            # Delete Comments of old events
+            self.delete("Delete comments of old events",
+                        self.deleteChilds, "Comment", "old_events")
+
+            # Delete old events
+            self.delete("Delete old events", self.deleteObjects,
+                        "Event", "old_events")
+
+            self.beginMessage("Cleaning up temporary results")
+            if not self.runCommand("drop table old_events"):
+                return False
+            self.endMessage()
+
+            # Focal mechanisms left without a FocalMechanismReference are
+            # now unreachable and removed together with their children.
+            tmp_fm = "\
+            create temporary table tmp_fm as \
+            select FocalMechanism._oid, PFM.%s, 0 as used \
+            from PublicObject as PFM, FocalMechanism \
+            where PFM._oid=FocalMechanism._oid\
+            " % (self.cnvCol("publicID"))
+
+            self.beginMessage("Find unassociated focal mechanisms")
+
+            if not self.runCommand(tmp_fm):
+                return False
+
+            tmp_fm = "\
+            update tmp_fm set used=1 \
+            where " + self.cnvCol("publicID") + " in (select distinct " + self.cnvCol("focalMechanismID") + " from FocalMechanismReference) \
+            "
+
+            if not self.runCommand(tmp_fm):
+                return False
+
+            self.endMessage(self.unusedCount("tmp_fm"))
+
+            # Delete Comments of unassociated focal mechanisms
+            self.delete("Delete comments of unassociation focal mechanisms",
+                        self.deleteUnusedChilds, "Comment", "tmp_fm")
+
+            # Delete MomentTensor.Comments of unassociated focal mechanisms
+            self.delete("Delete moment tensor comments of unassociated focal mechanisms",
+                        self.deleteUnusedChilds, "Comment", "MomentTensor",
+                        "tmp_fm")
+
+            # Delete MomentTensor.DataUsed of unassociated focal mechanisms
+            self.delete("Delete moment tensor data of unassociated focal mechanisms",
+                        self.deleteUnusedChilds, "DataUsed", "MomentTensor",
+                        "tmp_fm")
+
+            # Delete MomentTensor.PhaseSetting of unassociated focal mechanisms
+            self.delete("Delete moment tensor phase settings of unassociated focal mechanisms",
+                        self.deleteUnusedChilds, "MomentTensorPhaseSetting",
+                        "MomentTensor", "tmp_fm")
+
+            # Delete MomentTensor.StationContribution.ComponentContribution of unassociated focal mechanisms
+            self.delete("Delete moment tensor component contributions of unassociated focal mechanisms",
+                        self.deleteUnusedChilds,
+                        "MomentTensorComponentContribution",
+                        "MomentTensorStationContribution", "MomentTensor",
+                        "tmp_fm")
+
+            # Delete MomentTensor.StationContributions of unassociated focal mechanisms
+            self.delete("Delete moment tensor station contributions of unassociated focal mechanisms",
+                        self.deleteUnusedPublicChilds,
+                        "MomentTensorStationContribution", "MomentTensor",
+                        "tmp_fm")
+
+            # Delete MomentTensors of unassociated focal mechanisms
+            self.delete("Delete moment tensors of unassociated focal mechanisms",
+                        self.deleteUnusedPublicChilds, "MomentTensor",
+                        "tmp_fm")
+
+            # Delete FocalMechanism itself
+            self.delete("Delete unassociated focal mechanisms",
+                        self.deleteUnusedObjects, "FocalMechanism", "tmp_fm")
+
+            self.beginMessage("Cleaning up temporary results")
+            if not self.runCommand("drop table tmp_fm"):
+                return False
+            self.endMessage()
+
+            # Origins neither referenced by an event nor derived by a
+            # moment tensor are unreachable and removed with their children.
+            tmp_origin = "\
+            create temporary table tmp_origin as \
+            select Origin._oid, POrigin.%s, 0 as used \
+            from PublicObject as POrigin, Origin \
+            where POrigin._oid=Origin._oid and \
+                  Origin.%s %s '%s'\
+            " % (self.cnvCol("publicID"), self.cnvCol("time_value"), op, timestamp.toString("%Y-%m-%d %H:%M:%S"))
+
+            self.beginMessage("Find unassociated origins")
+
+            if not self.runCommand(tmp_origin):
+                return False
+
+            tmp_origin = "\
+            update tmp_origin set used=1 \
+            where (" + self.cnvCol("publicID") + " in (select distinct " + self.cnvCol("originID") + " from OriginReference)) \
+            or (" + self.cnvCol("publicID") + " in (select " + self.cnvCol("derivedOriginID") + " from MomentTensor))"
+
+            if not self.runCommand(tmp_origin):
+                return False
+
+            self.endMessage(self.unusedCount("tmp_origin"))
+
+            # Delete Arrivals of unassociated origins
+            self.delete("Delete unassociated arrivals",
+                        self.deleteUnusedChilds, "Arrival", "tmp_origin")
+
+            # Delete StationMagnitudes of unassociated origins
+            self.delete("Delete unassociated station magnitudes",
+                        self.deleteUnusedPublicChilds, "StationMagnitude",
+                        "tmp_origin")
+
+            # Delete StationMagnitudeContributions of unassociated origins
+            self.delete("Delete unassociated station magnitude contributions",
+                        self.deleteUnusedChilds,
+                        "StationMagnitudeContribution", "Magnitude",
+                        "tmp_origin")
+
+            # Delete Magnitudes of unassociated origins
+            self.delete("Delete unassociated magnitudes",
+                        self.deleteUnusedPublicChilds, "Magnitude",
+                        "tmp_origin")
+
+            # Delete Comments of unassociated origins
+            self.delete("Delete comments of unassociation origins",
+                        self.deleteUnusedChilds, "Comment", "tmp_origin")
+
+            # Delete CompositeTimes of unassociated origins
+            self.delete("Delete composite times of unassociation origins",
+                        self.deleteUnusedChilds, "CompositeTime", "tmp_origin")
+
+            # Delete Origins itself
+            self.delete("Delete unassociated origins",
+                        self.deleteUnusedObjects, "Origin", "tmp_origin")
+
+            self.beginMessage("Cleaning up temporary results")
+            if not self.runCommand("drop table tmp_origin"):
+                return False
+            self.endMessage()
+
+            # Delete all unassociated picks (via arrivals)
+
+            self.beginMessage("Find unassociated picks")
+
+            tmp_pick = "\
+            create temporary table tmp_pick as \
+            select Pick._oid, PPick.%s, 0 as used \
+            from PublicObject as PPick, Pick \
+            where PPick._oid=Pick._oid and \
+                  Pick.%s %s '%s' \
+            " % (self.cnvCol("publicID"), self.cnvCol("time_value"), op, timestamp.toString("%Y-%m-%d %H:%M:%S"))
+
+            if not self.runCommand(tmp_pick):
+                return False
+
+            tmp_pick = "\
+            update tmp_pick set used=1 \
+            where " + self.cnvCol("publicID") + " in \
+            (select distinct " + self.cnvCol("pickID") + " from Arrival) \
+            "
+
+            if not self.runCommand(tmp_pick):
+                return False
+
+            self.endMessage(self.unusedCount("tmp_pick"))
+
+            self.delete("Delete unassociated picks",
+                        self.deleteUnusedObjects, "Pick", "tmp_pick")
+
+            self.beginMessage("Cleaning up temporary results")
+            if not self.runCommand("drop table tmp_pick"):
+                return False
+            self.endMessage()
+
+            # Delete all unassociated amplitudes (via stationmagnitudes)
+
+            self.beginMessage("Find unassociated amplitudes")
+
+            tmp_amp = "\
+            create temporary table tmp_amp as \
+            select Amplitude._oid, PAmplitude.%s, 0 as used \
+            from PublicObject as PAmplitude, Amplitude \
+            where PAmplitude._oid=Amplitude._oid and \
+                  Amplitude.%s %s '%s' \
+            " % (self.cnvCol("publicID"), self.cnvCol("timeWindow_reference"), op, timestamp.toString("%Y-%m-%d %H:%M:%S"))
+
+            if not self.runCommand(tmp_amp):
+                return False
+
+            tmp_amp = "\
+            update tmp_amp set used=1 \
+            where " + self.cnvCol("publicID") + " in \
+            (select distinct " + self.cnvCol("amplitudeID") + " from StationMagnitude) \
+            "
+
+            if not self.runCommand(tmp_amp):
+                return False
+
+            self.endMessage(self.unusedCount("tmp_amp"))
+
+            self.delete("Delete unassociated station amplitudes",
+                        self.deleteUnusedObjects, "Amplitude", "tmp_amp")
+
+            self.beginMessage("Cleaning up temporary results")
+            if not self.runCommand("drop table tmp_amp"):
+                return False
+            self.endMessage()
+
+            return True
+
+        except RuntimeException as e:
+            error.write("\nException: %s\n" % str(e))
+            return False
+
+        except:
+            # NOTE(review): bare except also catches SystemExit and
+            # KeyboardInterrupt; it re-reports and exits -1
+            info = traceback.format_exception(*sys.exc_info())
+            for i in info:
+                sys.stderr.write(i)
+            sys.exit(-1)
+
+    def cnvCol(self, col):
+        """Convert a column name to the backend's naming convention."""
+        return self.database().convertColumnName(col)
+
+    def beginMessage(self, msg):
+        """Print a progress-prefixed message ("[ 42%] msg...") and advance
+        the step counter (self._steps must be set beforehand)."""
+        output.write("[%3d%%] " % (self._currentStep*100/self._steps))
+        output.write(msg + "...")
+        output.flush()
+        self._currentStep = self._currentStep + 1
+
+ def endMessage(self, count=None):
+ if not count is None:
+ output.write("done (%d)" % count)
+ else:
+ output.write("done")
+
+ span = self._timer.elapsed().seconds()
+ output.write(", time spent: %d %02d:%02d:%02d\n" % (
+ span / 86400, (span % 86400) / 3600, (span % 3600) / 60, span % 60))
+
+    def runCommand(self, q):
+        """Execute SQL command *q*; return True on success.
+
+        Raises RuntimeException when the command fails and
+        ExitRequestException when an exit was requested before or after.
+        """
+        if self.isExitRequested():
+            raise ExitRequestException
+
+        if not self.database().execute(q):
+            raise RuntimeException("ERROR: command '%s' failed\n" % q)
+
+        if self.isExitRequested():
+            raise ExitRequestException
+
+        return True
+
+    def runQuery(self, q):
+        """Run query *q* and return the first field of its first row.
+
+        Returns a one-element list of strings; "-1" when no row was
+        fetched. Raises RuntimeException on query failure and
+        ExitRequestException when an exit was requested.
+        """
+        if self.isExitRequested():
+            raise ExitRequestException
+
+        count = "-1"
+
+        if not self.database().beginQuery(q):
+            raise RuntimeException("ERROR: command '%s' failed\n" % q)
+
+        if self.database().fetchRow():
+            count = self.database().getRowFieldString(0)
+
+        self.database().endQuery()
+
+        if self.isExitRequested():
+            raise ExitRequestException
+
+        return [count]
+
+ def globalCount(self, table):
+ return int(self.runQuery("select count(*) from %s" % table)[0])
+
+ def usedCount(self, table):
+ return int(self.runQuery("select count(*) from %s where used=1" % table)[0])
+
+ def unusedCount(self, table):
+ return int(self.runQuery("select count(*) from %s where used=0" % table)[0])
+
+    def deleteChilds(self, *v):
+        """Delete child rows (and their Object rows) along the parent chain
+        in *v*; return the number of affected children."""
+        count = int(self.runQuery(
+            self._query.childQuery("count", "Object", *v))[0])
+        self.runCommand(self._query.childQuery("delete", "Object", *v))
+        self.runCommand(self._query.childQuery("delete", None, *v))
+        return count
+
+    def deleteUnusedChilds(self, *v):
+        """Like deleteChilds() but restricted to rows whose temporary table
+        entry has used=0; return the number of affected children."""
+        count = int(self.runQuery(self._query.childQuery(
+            "count", "Object", *v) + " and used=0")[0])
+        self.runCommand(self._query.childQuery(
+            "delete", "Object", *v) + " and used=0")
+        self.runCommand(self._query.childQuery(
+            "delete", None, *v) + " and used=0")
+        return count
+
+    def deleteUnusedPublicChilds(self, *v):
+        """Delete unused public children: their journal entries, Object and
+        PublicObject rows, and the rows themselves; return the count."""
+        count = int(self.runQuery(self._query.childQuery(
+            "count", "Object", *v) + " and used=0")[0])
+        self.runCommand(self._query.childJournalQuery(
+            "delete", "PublicObject", *v) + " and used=0")
+        self.runCommand(self._query.childQuery(
+            "delete", "Object", *v) + " and used=0")
+        self.runCommand(self._query.childQuery(
+            "delete", "PublicObject", *v) + " and used=0")
+        self.runCommand(self._query.childQuery(
+            "delete", None, *v) + " and used=0")
+        return count
+
+    def deleteUnusedRawObjects(self, *v):
+        """Delete unused raw objects: journal entries plus Object and
+        PublicObject rows. Returns None (no count is computed)."""
+        self.runCommand(self._query.deleteJournalQuery(
+            "PublicObject", *v) + " and used=0")
+        self.runCommand(self._query.deleteObjectQuery(
+            None, "Object", *v) + " and used=0")
+        self.runCommand(self._query.deleteObjectQuery(
+            None, "PublicObject", *v) + " and used=0")
+        return None
+
+    def deleteObjects(self, *v):
+        """Delete objects joined through *v*: journal entries, Object and
+        PublicObject rows, then the rows themselves. Returns None."""
+        self.runCommand(self._query.deleteJournalQuery("PublicObject", *v))
+        self.runCommand(self._query.deleteObjectQuery("Object", *v))
+        self.runCommand(self._query.deleteObjectQuery("PublicObject", *v))
+        self.runCommand(self._query.deleteObjectQuery(None, *v))
+        return None
+
+    def deleteUnusedObjects(self, *v):
+        """Like deleteObjects() but restricted to rows whose temporary table
+        entry has used=0. Returns None."""
+        self.runCommand(self._query.deleteJournalQuery(
+            "PublicObject", *v) + " and used=0")
+        self.runCommand(self._query.deleteObjectQuery(
+            "Object", *v) + " and used=0")
+        self.runCommand(self._query.deleteObjectQuery(
+            "PublicObject", *v) + " and used=0")
+        self.runCommand(self._query.deleteObjectQuery(
+            None, *v) + " and used=0")
+        return None
+
+    def delete(self, message, func, *v):
+        """Run deletion helper *func* wrapped in progress output.
+
+        message: progress text; func: one of the delete* methods above;
+        *v: arguments forwarded to func. Returns func's count (may be None).
+        """
+        self.beginMessage(message)
+        count = func(*v)
+        self.endMessage(count)
+        return count
+
+
+# Script entry point: run the cleaner with the process arguments and exit
+# with its return code.
+app = DBCleaner(len(sys.argv), sys.argv)
+sys.exit(app())
diff --git a/bin/scdispatch b/bin/scdispatch
new file mode 100755
index 0000000..64bf33a
Binary files /dev/null and b/bin/scdispatch differ
diff --git a/bin/scdumpcfg b/bin/scdumpcfg
new file mode 100755
index 0000000..a477628
--- /dev/null
+++ b/bin/scdumpcfg
@@ -0,0 +1,238 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+from __future__ import division, print_function
+
+import sys
+import os
+import seiscomp.client
+import seiscomp.datamodel
+import seiscomp.config
+
+
+def readParams(sc_params):
+ if sc_params.baseID():
+ sc_params_base = seiscomp.datamodel.ParameterSet.Find(
+ sc_params.baseID())
+ if sc_params_base is None:
+ sys.stderr.write("Warning: %s: base parameter set for %s not found\n" % (
+ sc_params.baseID(), sc_params.publicID()))
+ params = {}
+ else:
+ params = readParams(sc_params_base)
+ else:
+ params = {}
+
+ for i in range(sc_params.parameterCount()):
+ p = sc_params.parameter(i)
+ params[p.name()] = p.value()
+
+ return params
+
+
+class DumpCfg(seiscomp.client.Application):
+ def __init__(self, argc, argv):
+ if argc < 2:
+ sys.stderr.write("scdumpcfg {modname} [options]\n")
+ raise RuntimeError
+
+ self.appName = argv[1]
+
+ # Remove first parameter to replace appname with passed module name
+ argc = argc-1
+ argv = argv[1:]
+
+ seiscomp.client.Application.__init__(self, argc, argv)
+
+ self.setMessagingEnabled(True)
+ self.setMessagingUsername("")
+ self.setDatabaseEnabled(True, True)
+ self.setLoadConfigModuleEnabled(True)
+ self.setDaemonEnabled(False)
+
+ def createCommandLineDescription(self):
+ self.commandline().addGroup("Dump")
+ self.commandline().addStringOption("Dump", "param,P",
+ "Specify parameter name to filter for.")
+ self.commandline().addOption("Dump", "bindings,B",
+ "Dump bindings instead of module configuration.")
+ self.commandline().addOption("Dump", "allow-global,G",
+ "Print global bindings if no module binding is avaible.")
+ self.commandline().addOption("Dump", "cfg",
+ "Print output in .cfg format.")
+ self.commandline().addOption("Dump", "nslc",
+ "Print the list of streams which have bindings of the given module.")
+
+ def validateParameters(self):
+ if not seiscomp.client.Application.validateParameters(self):
+ return False
+
+ self.dumpBindings = self.commandline().hasOption("bindings")
+
+ try:
+ self.param = self.commandline().optionString("param")
+ except:
+ self.param = None
+
+ self.allowGlobal = self.commandline().hasOption("allow-global")
+ self.formatCfg = self.commandline().hasOption("cfg")
+ self.nslc = self.commandline().hasOption("nslc")
+
+ if not self.dumpBindings:
+ self.setMessagingEnabled(False)
+ self.setDatabaseEnabled(False, False)
+ self.setLoadConfigModuleEnabled(False)
+
+ return True
+
+ def initConfiguration(self):
+ if self.appName == "-h" or self.appName == "--help":
+ self.printUsage()
+ return False
+
+ return seiscomp.client.Application.initConfiguration(self)
+
+ # Do nothing.
+ def initSubscriptions(self):
+ return True
+
+ def printUsage(self):
+
+ print('''Usage:
+ {} [options]
+
+Dump bindings or module configurations used by a specific module or global for
+particular stations.'''.format(os.path.basename(__file__)), file=sys.stderr)
+
+ seiscomp.client.Application.printUsage(self)
+
+ print('''Examples:
+Dump global bindings configuration for all stations
+ {} global -d localhost -B > config.xml
+'''.format(os.path.basename(__file__)), file=sys.stderr)
+
+
+ def run(self):
+ cfg = self.configuration()
+ if self.nslc:
+ nslc = set()
+
+ if not self.dumpBindings:
+ symtab = cfg.symbolTable()
+ names = cfg.names()
+ count = 0
+ for name in names:
+ if self.param and self.param != name:
+ continue
+ sym = symtab.get(name)
+ if self.formatCfg:
+ if sym.comment:
+ if count > 0:
+ sys.stdout.write("\n")
+ sys.stdout.write("%s\n" % sym.comment)
+ sys.stdout.write("%s = %s\n" % (sym.name, sym.content))
+ else:
+ sys.stdout.write("%s\n" % sym.name)
+ sys.stdout.write(" value(s) : %s\n" %
+ ", ".join(sym.values))
+ sys.stdout.write(" source : %s\n" % sym.uri)
+ count = count + 1
+
+ if self.param and count == 0:
+ sys.stderr.write("%s: definition not found\n." % self.param)
+ else:
+ cfg = self.configModule()
+ if cfg is None:
+ sys.stderr.write("No config module read\n")
+ return False
+
+ tmp = {}
+ for i in range(cfg.configStationCount()):
+ cfg_sta = cfg.configStation(i)
+ tmp[(cfg_sta.networkCode(), cfg_sta.stationCode())] = cfg_sta
+
+ name = self.name()
+ # For backward compatibility rename global to default
+ if name == "global":
+ name = "default"
+
+ for item in sorted(tmp.keys()):
+ cfg_sta = tmp[item]
+ sta_enabled = cfg_sta.enabled()
+ cfg_setup = seiscomp.datamodel.findSetup(
+ cfg_sta, name, self.allowGlobal)
+
+ if not cfg_setup is None:
+ suffix = ""
+ if sta_enabled and cfg_setup.enabled():
+ out = "+ "
+ else:
+ suffix = " ("
+ if not sta_enabled:
+ suffix += "station disabled"
+ if not cfg_setup.enabled():
+ if suffix:
+ suffix += ", "
+ suffix += "setup disabled"
+ suffix += ")"
+ out = "- "
+ out += "%s.%s%s\n" % (cfg_sta.networkCode(),
+ cfg_sta.stationCode(), suffix)
+ params = seiscomp.datamodel.ParameterSet.Find(
+ cfg_setup.parameterSetID())
+ if params is None:
+ sys.stderr.write(
+ "ERROR: %s: ParameterSet not found\n" %
+ cfg_setup.parameterSetID())
+ return False
+
+ params = readParams(params)
+ if self.nslc:
+ try:
+ sensorLocation = params["detecLocid"]
+ except:
+ sensorLocation = ""
+ try:
+ detecStream = params["detecStream"]
+ except:
+ detecStream = ""
+
+ stream = "%s.%s.%s.%s" % \
+ (cfg_sta.networkCode(), cfg_sta.stationCode(),
+ sensorLocation, detecStream)
+ nslc.add(stream)
+ count = 0
+ for param_name in sorted(params.keys()):
+ if self.param and self.param != param_name:
+ continue
+ out += " %s: %s\n" % (param_name, params[param_name])
+ count = count + 1
+
+ if not self.nslc and count > 0:
+ sys.stdout.write(out)
+
+ if self.nslc:
+ for stream in sorted(nslc):
+ print(stream, file=sys.stdout)
+
+ return True
+
+
+try:
+ app = DumpCfg(len(sys.argv), sys.argv)
+except:
+ sys.exit(1)
+
+sys.exit(app())
diff --git a/bin/scdumpobject b/bin/scdumpobject
new file mode 100755
index 0000000..c272dbc
--- /dev/null
+++ b/bin/scdumpobject
@@ -0,0 +1,75 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import sys
+import seiscomp.client, seiscomp.datamodel, seiscomp.io
+
+
+class ObjectDumper(seiscomp.client.Application):
+
+ def __init__(self):
+ seiscomp.client.Application.__init__(self, len(sys.argv), sys.argv)
+ self.setMessagingEnabled(True)
+ self.setDatabaseEnabled(True, False)
+ self.setMessagingUsername("")
+
+ def createCommandLineDescription(self):
+ seiscomp.client.Application.createCommandLineDescription(self)
+ self.commandline().addGroup("Dump")
+ self.commandline().addStringOption("Dump", "public-id,P", "publicID")
+
+ def loadEventParametersObject(self, publicID):
+ for tp in \
+ seiscomp.datamodel.Pick, seiscomp.datamodel.Amplitude, seiscomp.datamodel.Origin, \
+ seiscomp.datamodel.Event, seiscomp.datamodel.FocalMechanism, \
+ seiscomp.datamodel.Magnitude, seiscomp.datamodel.StationMagnitude:
+
+ obj = self.query().loadObject(tp.TypeInfo(), publicID)
+ obj = tp.Cast(obj)
+ if obj:
+ ep = seiscomp.datamodel.EventParameters()
+ ep.add(obj)
+ return ep
+
+ def loadInventoryObject(self, publicID):
+ for tp in \
+ seiscomp.datamodel.Network, seiscomp.datamodel.Station, seiscomp.datamodel.Sensor, \
+ seiscomp.datamodel.SensorLocation, seiscomp.datamodel.Stream:
+
+ obj = self.query().loadObject(tp.TypeInfo(), publicID)
+ obj = tp.Cast(obj)
+ if obj:
+ return obj
+
+ def run(self):
+ publicID = self.commandline().optionString("public-id")
+ obj = self.loadEventParametersObject(publicID)
+ if obj is None:
+ obj = self.loadInventoryObject(publicID)
+ if obj is None:
+ raise ValueError("unknown object '" + publicID + "'")
+
+ # dump formatted XML archive to stdout
+ ar = seiscomp.io.XMLArchive()
+ ar.setFormattedOutput(True)
+ ar.create("-")
+ ar.writeObject(obj)
+ ar.close()
+ return True
+
+
+if __name__ == "__main__":
+ app = ObjectDumper()
+ app()
diff --git a/bin/sceplog b/bin/sceplog
new file mode 100755
index 0000000..162534e
--- /dev/null
+++ b/bin/sceplog
@@ -0,0 +1,76 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import sys
+import os
+import seiscomp.client
+import seiscomp.datamodel
+import seiscomp.io
+
+
+class EventParameterLog(seiscomp.client.Application):
+ def __init__(self, argc, argv):
+ seiscomp.client.Application.__init__(self, argc, argv)
+
+ self.setMessagingEnabled(True)
+ self.setDatabaseEnabled(False, False)
+ self.setMessagingUsername("")
+ self.setPrimaryMessagingGroup(
+ seiscomp.client.Protocol.LISTENER_GROUP)
+ self.addMessagingSubscription("EVENT")
+ self.addMessagingSubscription("LOCATION")
+ self.addMessagingSubscription("MAGNITUDE")
+ self.addMessagingSubscription("AMPLITUDE")
+ self.addMessagingSubscription("PICK")
+
+ self.setAutoApplyNotifierEnabled(True)
+ self.setInterpretNotifierEnabled(True)
+
+ # EventParameter object
+ self._eventParameters = seiscomp.datamodel.EventParameters()
+
+ def printUsage(self):
+
+ print('''Usage:
+ sceplog [options]
+
+Receive event parameters from messaging and write them to stdout in SCML''')
+
+ seiscomp.client.Application.printUsage(self)
+
+ print('''Examples:
+Execute sceplog with debug output
+ sceplog --debug
+''')
+
+ def run(self):
+ if not seiscomp.client.Application.run(self):
+ return False
+
+ ar = seiscomp.io.XMLArchive()
+ ar.setFormattedOutput(True)
+ if ar.create("-"):
+ ar.writeObject(self._eventParameters)
+ ar.close()
+ # Hack to avoid the "close failed in file object destructor"
+ # exception
+# print ""
+ sys.stdout.write("\n")
+
+ return True
+
+
+app = EventParameterLog(len(sys.argv), sys.argv)
+sys.exit(app())
diff --git a/bin/scesv b/bin/scesv
new file mode 100755
index 0000000..2081b0f
Binary files /dev/null and b/bin/scesv differ
diff --git a/bin/scevent b/bin/scevent
new file mode 100755
index 0000000..fcd9d1d
Binary files /dev/null and b/bin/scevent differ
diff --git a/bin/scevtlog b/bin/scevtlog
new file mode 100755
index 0000000..19a9763
--- /dev/null
+++ b/bin/scevtlog
@@ -0,0 +1,850 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import sys
+import os
+import traceback
+import re
+import seiscomp.core
+import seiscomp.client
+import seiscomp.datamodel
+import seiscomp.io
+import seiscomp.logging
+import seiscomp.system
+
+
+def time2str(time):
+ """
+ Convert a seiscomp.core.Time to a string
+ """
+ return time.toString("%Y-%m-%d %H:%M:%S.%f000000")[:23]
+
+
+def createDirectory(dir):
+ if os.access(dir, os.W_OK):
+ return True
+
+ try:
+ os.makedirs(dir)
+ return True
+ except:
+ return False
+
+
+def originStatusToChar(org):
+ # Manual origin are always tagged as M
+ try:
+ if org.evaluationMode() == seiscomp.datamodel.MANUAL:
+ return 'M'
+ except:
+ pass
+
+ try:
+ if org.evaluationStatus() == seiscomp.datamodel.PRELIMINARY:
+ return 'P'
+ elif org.evaluationStatus() == seiscomp.datamodel.CONFIRMED or \
+ org.evaluationStatus() == seiscomp.datamodel.REVIEWED or \
+ org.evaluationStatus() == seiscomp.datamodel.FINAL:
+ return 'C'
+ elif org.evaluationStatus() == seiscomp.datamodel.REJECTED:
+ return 'X'
+ elif org.evaluationStatus() == seiscomp.datamodel.REPORTED:
+ return 'R'
+ except:
+ pass
+
+ return 'A'
+
+
+class CachePopCallback(seiscomp.datamodel.CachePopCallback):
+ def __init__(self, target):
+ seiscomp.datamodel.CachePopCallback.__init__(self)
+ self.target = target
+
+ def handle(self, obj):
+ self.target.objectAboutToPop(obj)
+
+
+class EventHistory(seiscomp.client.Application):
+ def __init__(self, argc, argv):
+ seiscomp.client.Application.__init__(self, argc, argv)
+ seiscomp.datamodel.Notifier.SetEnabled(False)
+
+ self.setMessagingEnabled(True)
+ self.setDatabaseEnabled(True, True)
+ self.setMessagingUsername("scevtlog")
+ self.setPrimaryMessagingGroup(
+ seiscomp.client.Protocol.LISTENER_GROUP)
+ self.addMessagingSubscription("EVENT")
+ self.addMessagingSubscription("LOCATION")
+ self.addMessagingSubscription("MAGNITUDE")
+
+ self.setAutoApplyNotifierEnabled(True)
+ self.setInterpretNotifierEnabled(True)
+
+ # Create a callback object that gets called when an object
+ # is going to be removed from the cache
+ self._popCallback = CachePopCallback(self)
+
+ # Create an object cache of half an hour
+ self._cache = seiscomp.datamodel.PublicObjectTimeSpanBuffer(
+ self.query(), seiscomp.core.TimeSpan(30.0*60.0))
+ self._cache.setPopCallback(self._popCallback)
+
+ # Event progress counter
+ self._eventProgress = dict()
+
+ # Event-Origin mapping
+ self._eventToOrg = dict()
+ self._orgToEvent = dict()
+
+ # Event-Magnitude mapping
+ self._eventToMag = dict()
+ self._magToEvent = dict()
+
+ self._directory = "@LOGDIR@/events"
+ self._format = "xml"
+ self._currentDirectory = ""
+ self._revisionFileExt = ".zip"
+ self._useGZIP = False
+
+ def createCommandLineDescription(self):
+ try:
+ self.commandline().addGroup("Storage")
+ self.commandline().addStringOption(
+ "Storage", "directory,o", "Specify the storage directory. "
+ "Default: @LOGDIR@/events.")
+ self.commandline().addStringOption("Storage", "format,f",
+ "Specify storage format (autoloc1, autoloc3, xml [default])")
+ except:
+ seiscomp.logging.warning(
+ "caught unexpected error %s" % sys.exc_info())
+ return True
+
+ def initConfiguration(self):
+ if not seiscomp.client.Application.initConfiguration(self):
+ return False
+
+ try:
+ self._directory = self.configGetString("directory")
+ except:
+ pass
+
+ try:
+ self._format = self.configGetString("format")
+ except:
+ pass
+
+ try:
+ if self.configGetBool("gzip"):
+ self._useGZIP = True
+ self._revisionFileExt = ".gz"
+ except:
+ pass
+
+ return True
+
+ def printUsage(self):
+ print('''Usage:
+ scevtlog [options]
+
+Save event history into files''')
+
+ seiscomp.client.Application.printUsage(self)
+
+ print('''Examples:
+Execute on command line with debug output
+ scevtlog --debug
+''')
+
+ def init(self):
+ if not seiscomp.client.Application.init(self):
+ return False
+
+ try:
+ self._directory = self.commandline().optionString("directory")
+ except:
+ pass
+
+ try:
+ self._format = self.commandline().optionString("format")
+ except:
+ pass
+
+ if self._format != "autoloc1" and self._format != "autoloc3" and self._format != "xml":
+ self._format = "xml"
+
+ try:
+ if self._directory[-1] != "/":
+ self._directory = self._directory + "/"
+ except:
+ pass
+
+ if self._directory:
+ self._directory = seiscomp.system.Environment.Instance().absolutePath(self._directory)
+ sys.stderr.write("Logging events to %s\n" % self._directory)
+
+ self._cache.setDatabaseArchive(self.query())
+ return True
+
+ # def run(self):
+ # obj = self._cache.get(seiscomp.datamodel.Magnitude, "or080221153929#16#netMag.mb")
+
+ # self.updateObject(obj)
+ # return True
+
+ def done(self):
+ seiscomp.client.Application.done(self)
+ self._cache.setDatabaseArchive(None)
+
+ def printEvent(self, evt, newEvent):
+ if self._format != "xml":
+ self.printEventProcAlert(evt, newEvent)
+ else:
+ self.printEventXML(evt, newEvent)
+ self.advanceEventProgress(evt.publicID())
+
+ def getSummary(self, time, org, mag):
+ strTime = time.toString("%Y-%m-%d %H:%M:%S")
+ summary = [strTime, "", "", "", "", "", "", "", "", ""]
+
+ if org:
+ tim = org.time().value()
+ latency = time - tim
+
+ summary[1] = "%5d.%02d" % (
+ latency.seconds() / 60, (latency.seconds() % 60) * 100 / 60)
+
+ lat = org.latitude().value()
+ lon = org.longitude().value()
+
+ dep = "%7s" % "---"
+ try:
+ dep = "%7.0f" % org.depth().value()
+ summary[4] = dep
+ except:
+ summary[4] = "%7s" % ""
+
+ phases = "%5s" % "---"
+ try:
+ phases = "%5d" % org.quality().usedPhaseCount()
+ summary[5] = phases
+ except:
+ summary[5] = "%5s" % ""
+
+ summary[2] = "%7.2f" % lat
+ summary[3] = "%7.2f" % lon
+
+ try:
+ summary[9] = originStatusToChar(org)
+ except:
+ summary[9] = "-"
+
+ if mag:
+ summary[6] = "%12s" % mag.type()
+ summary[7] = "%5.2f" % mag.magnitude().value()
+ try:
+ summary[8] = "%5d" % mag.stationCount()
+ except:
+ summary[8] = " "
+ else:
+ summary[6] = "%12s" % ""
+ summary[7] = " "
+ summary[8] = " "
+
+ return summary
+
+ def printEventProcAlert(self, evt, newEvent):
+ now = seiscomp.core.Time.GMT()
+
+ org = self._cache.get(seiscomp.datamodel.Origin,
+ evt.preferredOriginID())
+ prefmag = self._cache.get(
+ seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID())
+
+ summary = self.getSummary(now, org, prefmag)
+
+ # Load arrivals
+ if org.arrivalCount() == 0:
+ self.query().loadArrivals(org)
+
+ # Load station magnitudes
+ if org.stationMagnitudeCount() == 0:
+ self.query().loadStationMagnitudes(org)
+
+ # Load magnitudes
+ if org.magnitudeCount() == 0:
+ self.query().loadMagnitudes(org)
+
+ picks = []
+ amps = []
+
+ if org:
+ narr = org.arrivalCount()
+ for i in range(narr):
+ picks.append(self._cache.get(
+ seiscomp.datamodel.Pick, org.arrival(i).pickID()))
+
+ nstamags = org.stationMagnitudeCount()
+ for i in range(nstamags):
+ amps.append(self._cache.get(
+ seiscomp.datamodel.Amplitude, org.stationMagnitude(i).amplitudeID()))
+
+ netmag = {}
+ nmag = org.magnitudeCount()
+
+ bulletin = seiscomp.scbulletin.Bulletin(None, self._format)
+ try:
+ txt = bulletin.printEvent(evt)
+ except:
+ txt = ""
+
+ if self._directory is None:
+ sys.stdout.write("%s" % ("#<\n" + txt + "#>\n"))
+ sys.stdout.flush()
+ else:
+ # Use created time to look up the proper directory
+ try:
+ arNow = evt.creationInfo().creationTime().get()
+ # Otherwise use now (in case that event.created has not been set
+ # which is always valid within the SC3 distribution
+ except:
+ arNow = now.get()
+ seiscomp.logging.error("directory is " + self._directory + "/".join(
+ ["%.2d" % i for i in arNow[1:4]]) + "/" + evt.publicID() + "/")
+
+ directory = self._directory + \
+ "/".join(["%.2d" % i for i in arNow[1:4]]) + \
+ "/" + evt.publicID() + "/"
+ if directory != self._currentDirectory:
+ if createDirectory(directory) == False:
+ seiscomp.logging.error(
+ "Unable to create directory %s" % directory)
+ return
+
+ self._currentDirectory = directory
+ self.writeLog(self._currentDirectory + self.convertID(evt.publicID()) +
+ "." + ("%06d" % self.eventProgress(evt.publicID(), directory)), txt, "w")
+ self.writeLog(self._currentDirectory +
+ self.convertID(evt.publicID()) + ".last", txt, "w")
+ self.writeLog(self._directory + "last", txt, "w")
+ self.writeLog(self._currentDirectory + self.convertID(evt.publicID()) + ".summary",
+ "|".join(summary), "a",
+ "# Layout: Timestamp, +OT (minutes, decimal), Latitude, Longitude, Depth, PhaseCount, MagType, Magnitude, MagCount")
+
+ seiscomp.logging.info("cache size = %d" % self._cache.size())
+
+ def printEventXML(self, evt, newEvent):
+ now = seiscomp.core.Time.GMT()
+
+ # Load comments
+ if evt.commentCount() == 0:
+ self.query().loadComments(evt)
+
+ # Load origin references
+ if evt.originReferenceCount() == 0:
+ self.query().loadOriginReferences(evt)
+
+ # Load event descriptions
+ if evt.eventDescriptionCount() == 0:
+ self.query().loadEventDescriptions(evt)
+
+ org = self._cache.get(seiscomp.datamodel.Origin,
+ evt.preferredOriginID())
+
+ if evt.preferredFocalMechanismID():
+ fm = self._cache.get(
+ seiscomp.datamodel.FocalMechanism, evt.preferredFocalMechanismID())
+ else:
+ fm = None
+
+ # Load comments
+ if org.commentCount() == 0:
+ self.query().loadComments(org)
+
+ # Load arrivals
+ if org.arrivalCount() == 0:
+ self.query().loadArrivals(org)
+ prefmag = self._cache.get(
+ seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID())
+
+ wasEnabled = seiscomp.datamodel.PublicObject.IsRegistrationEnabled()
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
+
+ ep = seiscomp.datamodel.EventParameters()
+ evt_cloned = seiscomp.datamodel.Event.Cast(evt.clone())
+ ep.add(evt_cloned)
+
+ summary = self.getSummary(now, org, prefmag)
+
+ if fm:
+ ep.add(fm)
+
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
+
+        # Load focal mechanism references
+ if evt.focalMechanismReferenceCount() == 0:
+ self.query().loadFocalMechanismReferences(evt)
+
+ # Load moment tensors
+ if fm.momentTensorCount() == 0:
+ self.query().loadMomentTensors(fm)
+
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
+
+ # Copy focal mechanism reference
+ fm_ref = evt.focalMechanismReference(
+ seiscomp.datamodel.FocalMechanismReferenceIndex(fm.publicID()))
+ if fm_ref:
+ fm_ref_cloned = seiscomp.datamodel.FocalMechanismReference.Cast(
+ fm_ref.clone())
+ if fm_ref_cloned is None:
+ fm_ref_cloned = seiscomp.datamodel.FocalMechanismReference(
+ fm.publicID())
+ evt_cloned.add(fm_ref_cloned)
+
+ nmt = fm.momentTensorCount()
+ for i in range(nmt):
+ mt = fm.momentTensor(i)
+ if not mt.derivedOriginID():
+ continue
+
+ # Origin already added
+ if ep.findOrigin(mt.derivedOriginID()) is not None:
+ continue
+
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
+ wasEnabled)
+ derivedOrigin = self._cache.get(
+ seiscomp.datamodel.Origin, mt.derivedOriginID())
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
+
+ if derivedOrigin is None:
+ seiscomp.logging.warning(
+ "derived origin for MT %s not found" % mt.derivedOriginID())
+ continue
+
+ # Origin has been read from database -> read all childs
+ if not self._cache.cached():
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
+ wasEnabled)
+ self.query().load(derivedOrigin)
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
+ False)
+
+ # Add it to the event parameters
+ ep.add(derivedOrigin)
+
+ if org:
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
+
+ # Load magnitudes
+ if org.magnitudeCount() == 0:
+ self.query().loadMagnitudes(org)
+
+ if org.stationMagnitudeCount() == 0:
+ self.query().loadStationMagnitudes(org)
+
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
+
+ # Copy event comments
+ ncmts = evt.commentCount()
+ for i in range(ncmts):
+ cmt_cloned = seiscomp.datamodel.Comment.Cast(
+ evt.comment(i).clone())
+ evt_cloned.add(cmt_cloned)
+
+ # Copy origin references
+ org_ref = evt.originReference(
+ seiscomp.datamodel.OriginReferenceIndex(org.publicID()))
+ if org_ref:
+ org_ref_cloned = seiscomp.datamodel.OriginReference.Cast(
+ org_ref.clone())
+ if org_ref_cloned is None:
+ org_ref_cloned = seiscomp.datamodel.OriginReference(
+ org.publicID())
+ evt_cloned.add(org_ref_cloned)
+
+ # Copy event descriptions
+ for i in range(evt.eventDescriptionCount()):
+ ed_cloned = seiscomp.datamodel.EventDescription.Cast(
+ evt.eventDescription(i).clone())
+ evt_cloned.add(ed_cloned)
+
+ org_cloned = seiscomp.datamodel.Origin.Cast(org.clone())
+ ep.add(org_cloned)
+
+ # Copy origin comments
+ ncmts = org.commentCount()
+ for i in range(ncmts):
+ cmt_cloned = seiscomp.datamodel.Comment.Cast(
+ org.comment(i).clone())
+ org_cloned.add(cmt_cloned)
+
+ # Copy arrivals
+ narr = org.arrivalCount()
+ for i in range(narr):
+ arr_cloned = seiscomp.datamodel.Arrival.Cast(
+ org.arrival(i).clone())
+ org_cloned.add(arr_cloned)
+
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
+ wasEnabled)
+ pick = self._cache.get(
+ seiscomp.datamodel.Pick, arr_cloned.pickID())
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
+
+ if pick:
+ pick_cloned = seiscomp.datamodel.Pick.Cast(pick.clone())
+ ep.add(pick_cloned)
+
+ # Copy network magnitudes
+ nmag = org.magnitudeCount()
+ for i in range(nmag):
+ mag = org.magnitude(i)
+
+ mag_cloned = seiscomp.datamodel.Magnitude.Cast(mag.clone())
+
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
+ wasEnabled)
+ if mag.stationMagnitudeContributionCount() == 0:
+ self.query().loadStationMagnitudeContributions(mag)
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
+
+ # Copy magnitude references
+ nmagref = mag.stationMagnitudeContributionCount()
+ for j in range(nmagref):
+ mag_ref_cloned = seiscomp.datamodel.StationMagnitudeContribution.Cast(
+ mag.stationMagnitudeContribution(j).clone())
+ mag_cloned.add(mag_ref_cloned)
+
+ org_cloned.add(mag_cloned)
+
+ # Copy station magnitudes and station amplitudes
+ smag = org.stationMagnitudeCount()
+ amp_map = dict()
+ for i in range(smag):
+ mag_cloned = seiscomp.datamodel.StationMagnitude.Cast(
+ org.stationMagnitude(i).clone())
+ org_cloned.add(mag_cloned)
+ if (mag_cloned.amplitudeID() in amp_map) == False:
+ amp_map[mag_cloned.amplitudeID()] = True
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
+ wasEnabled)
+ amp = self._cache.get(
+ seiscomp.datamodel.Amplitude, mag_cloned.amplitudeID())
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
+ False)
+ if amp:
+ amp_cloned = seiscomp.datamodel.Amplitude.Cast(
+ amp.clone())
+ ep.add(amp_cloned)
+
+ seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
+
+ # archive.create(event.publicID() + )
+ ar = seiscomp.io.XMLArchive()
+ ar.setFormattedOutput(True)
+
+ if self._directory is None:
+ sys.stdout.write("#<\n")
+ ar.create("-")
+ ar.writeObject(ep)
+ ar.close()
+ sys.stdout.write("#>\n")
+ sys.stdout.flush()
+ else:
+ # Use created time to look up the proper directory
+ try:
+ arNow = evt.creationInfo().creationTime().get()
+ # Otherwise use now (in case that event.created has not been set
+ # which is always valid within the SC3 distribution
+ except:
+ arNow = now.get()
+
+ directory = self._directory + \
+ "/".join(["%.2d" % i for i in arNow[1:4]]) + \
+ "/" + evt.publicID() + "/"
+ if directory != self._currentDirectory:
+ if createDirectory(directory) == False:
+ seiscomp.logging.error(
+ "Unable to create directory %s" % directory)
+ return
+
+ self._currentDirectory = directory
+ # self.writeLog(self._currentDirectory + evt.publicID(), "#<\n" + txt + "#>\n")
+ #self.writeLog(self._currentDirectory + evt.publicID() + ".last", txt, "w")
+ ar.create(self._currentDirectory + self.convertID(evt.publicID()) + "." + ("%06d" %
+ self.eventProgress(evt.publicID(), directory)) + ".xml" + self._revisionFileExt)
+ ar.setCompression(True)
+ if self._useGZIP:
+ ar.setCompressionMethod(seiscomp.io.XMLArchive.GZIP)
+ ar.writeObject(ep)
+ ar.close()
+ # Write last file to root
+ ar.create(self._directory + "last.xml" + self._revisionFileExt)
+ ar.setCompression(True)
+ if self._useGZIP:
+ ar.setCompressionMethod(seiscomp.io.XMLArchive.GZIP)
+ ar.writeObject(ep)
+ ar.close()
+ # Write last xml
+ ar.create(self._currentDirectory +
+ self.convertID(evt.publicID()) + ".last.xml")
+ ar.setCompression(False)
+ ar.writeObject(ep)
+ ar.close()
+ self.writeLog(self._currentDirectory + self.convertID(evt.publicID()) + ".summary",
+ "|".join(summary), "a",
+ "# Layout: Timestamp, +OT (minutes, decimal), Latitude, Longitude, Depth, PhaseCount, MagType, Magnitude, MagCount")
+
+ del ep
+
+ def convertID(self, id):
+ '''Converts an ID containing slashes to one without slashes'''
+ p = re.compile('/')
+ return p.sub('_', id)
+
+ def writeLog(self, file, text, mode="a", header=None):
+ of = open(file, mode)
+ if of:
+ if of.tell() == 0 and not header is None:
+ of.write(header+"\n")
+ of.write(text+"\n")
+ of.close()
+ else:
+ seiscomp.logging.error("Unable to write file: %s" % file)
+
+ def objectAboutToPop(self, obj):
+ try:
+ evt = seiscomp.datamodel.Event.Cast(obj)
+ if evt:
+ try:
+ self._orgToEvent.pop(evt.preferredOriginID())
+ self._eventToOrg.pop(evt.publicID())
+
+ self._magToEvent.pop(evt.preferredMagnitudeID())
+ self._eventToMag.pop(evt.publicID())
+
+ self._eventProgress.pop(evt.publicID())
+ return
+ except:
+ pass
+
+ org = seiscomp.datamodel.Origin.Cast(obj)
+ if org:
+ try:
+ self._orgToEvent.pop(org.publicID())
+ except:
+ pass
+ return
+
+ mag = seiscomp.datamodel.Magnitude.Cast(obj)
+ if mag:
+ try:
+ self._magToEvent.pop(mag.publicID())
+ except:
+ pass
+ return
+ except:
+ info = traceback.format_exception(*sys.exc_info())
+ for i in info:
+ sys.stderr.write(i)
+ sys.exit(-1)
+
+ def eventProgress(self, evtID, directory):
+ # The progress is already stored
+ if evtID in self._eventProgress:
+ return self._eventProgress[evtID]
+
+ # Find the maximum file counter
+ maxid = -1
+ files = os.listdir(directory)
+ for file in files:
+ if os.path.isfile(directory + file) == False:
+ continue
+ fid = file[len(evtID + '.'):len(file)]
+ sep = fid.find('.')
+ if sep == -1:
+ sep = len(fid)
+ fid = fid[0:sep]
+ try:
+ nid = int(fid)
+ except:
+ continue
+ if nid > maxid:
+ maxid = nid
+
+ maxid = maxid + 1
+ self._eventProgress[evtID] = maxid
+ return maxid
+
+ def advanceEventProgress(self, evtID):
+ try:
+ self._eventProgress[evtID] = self._eventProgress[evtID] + 1
+ except:
+ pass
+
+ def addObject(self, parentID, object):
+ try:
+ obj = seiscomp.datamodel.Event.Cast(object)
+ if obj:
+ self._cache.feed(obj)
+ self._eventProgress[obj.publicID()] = 0
+ self.printEvent(obj, True)
+ self.updateCache(obj)
+ return
+
+ # New Magnitudes or Origins are not important for
+ # the history update but we feed it into the cache to
+ # access them faster later on in case they will become
+ # preferred entities
+ obj = seiscomp.datamodel.Magnitude.Cast(object)
+ if obj:
+ self._cache.feed(obj)
+ return
+
+ obj = seiscomp.datamodel.Origin.Cast(object)
+ if obj:
+ self._cache.feed(obj)
+ return
+
+ obj = seiscomp.datamodel.Pick.Cast(object)
+ if obj:
+ self._cache.feed(obj)
+ return
+
+ obj = seiscomp.datamodel.Amplitude.Cast(object)
+ if obj:
+ self._cache.feed(obj)
+ return
+
+ except:
+ info = traceback.format_exception(*sys.exc_info())
+ for i in info:
+ sys.stderr.write(i)
+ sys.exit(-1)
+
+ def updateObject(self, parentID, object):
+ try:
+ obj = seiscomp.datamodel.Event.Cast(object)
+ if obj:
+ self._cache.feed(obj)
+ self.printEvent(obj, False)
+ self.updateCache(obj)
+ return
+
+ # Updates of a Magnitude are only imported when it is
+ # the preferred one.
+ obj = seiscomp.datamodel.Magnitude.Cast(object)
+ if obj:
+ try:
+ evtID = self._magToEvent[obj.publicID()]
+ if evtID:
+ self._cache.feed(obj)
+ evt = self._cache.get(seiscomp.datamodel.Event, evtID)
+ if evt:
+ self.printEvent(evt, False)
+ else:
+ sys.stderr.write("Unable to fetch event for ID '%s' while update of magnitude '%s'\n" % (
+ evtID, obj.publicID()))
+ else:
+ # Magnitude has not been associated to an event yet
+ pass
+ except:
+ # Search the corresponding event from the database
+ evt = self.query().getEventByPreferredMagnitudeID(obj.publicID())
+ # Associate the event (even if None) with the magnitude ID
+ if evt:
+ self._magToEvent[obj.publicID()] = evt.publicID()
+ self._cache.feed(obj)
+ self.printEvent(evt, False)
+ else:
+ self._magToEvent[obj.publicID()] = None
+ return
+
+ # Usually we do not update origins. To have it complete,
+ # this case will be supported as well
+ obj = seiscomp.datamodel.Origin.Cast(object)
+ if obj:
+ try:
+ evtID = self._orgToEvent[obj.publicID()]
+ if evtID:
+ self._cache.feed(obj)
+ evt = self._cache.get(seiscomp.datamodel.Event, evtID)
+ if evt:
+ self.printEvent(evt, False)
+ else:
+ sys.stderr.write("Unable to fetch event for ID '%s' while update of origin '%s'\n" % (
+ evtID, obj.publicID()))
+ else:
+ # Origin has not been associated to an event yet
+ pass
+ except:
+ # Search the corresponding event from the database
+ evt = self.query().getEvent(obj.publicID())
+ if evt:
+ if evt.preferredOriginID() != obj.publicID():
+ evt = None
+
+ # Associate the event (even if None) with the origin ID
+ if evt:
+ self._orgToEvent[obj.publicID()] = evt.publicID()
+ self._cache.feed(obj)
+ self.printEvent(evt, False)
+ else:
+ self._orgToEvent[obj.publicID()] = None
+ return
+
+ return
+
+ except:
+ info = traceback.format_exception(*sys.exc_info())
+ for i in info:
+ sys.stderr.write(i)
+ sys.exit(-1)
+
+ def updateCache(self, evt):
+ # Event-Origin update
+ try:
+ orgID = self._eventToOrg[evt.publicID()]
+ if orgID != evt.preferredOriginID():
+ self._orgToEvent.pop(orgID)
+ except:
+ # origin not yet registered
+ pass
+
+ # Bind the current preferred origin ID to the event and
+ # vice versa
+ self._orgToEvent[evt.preferredOriginID()] = evt.publicID()
+ self._eventToOrg[evt.publicID()] = evt.preferredOriginID()
+
+ # Event-Magnitude update
+ try:
+ magID = self._eventToMag[evt.publicID()]
+ if magID != evt.preferredMagnitudeID():
+ self._magToEvent.pop(magID)
+ except:
+ # not yet registered
+ pass
+
+ # Bind the current preferred magnitude ID to the event and
+ # vice versa
+ self._magToEvent[evt.preferredMagnitudeID()] = evt.publicID()
+ self._eventToMag[evt.publicID()] = evt.preferredMagnitudeID()
+
+
+app = EventHistory(len(sys.argv), sys.argv)
+sys.exit(app())
diff --git a/bin/scevtls b/bin/scevtls
new file mode 100755
index 0000000..d59871d
--- /dev/null
+++ b/bin/scevtls
@@ -0,0 +1,197 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import sys
+import seiscomp.core
+import seiscomp.client
+import seiscomp.datamodel
+import seiscomp.logging
+
+
def _parseTime(timestring):
    """Parse a time string in one of the supported formats.

    Accepted: 'YYYY-MM-DD HH:MM:SS', the ISO 'T'-separated form, and the
    ISO form with a trailing 'Z'. Returns a seiscomp.core.Time on success
    or None if no format matches.
    """
    for fmt in ("%F %T", "%FT%T", "%FT%TZ"):
        candidate = seiscomp.core.Time()
        if candidate.fromString(timestring, fmt):
            return candidate
    return None
+
+
class EventList(seiscomp.client.Application):
    """List IDs of events in a given time range on stdout.

    Events are read from the database; optionally the preferred origin ID
    is printed along with each event ID, separated by a configurable
    delimiter.
    """

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        # Offline tool: database access only, no messaging, no daemon mode.
        self.setMessagingEnabled(False)
        self.setDatabaseEnabled(True, False)
        self.setDaemonEnabled(False)

        self._startTime = None
        self._endTime = None
        self.hours = None
        self._delimiter = None
        self._modifiedAfterTime = None
        self._preferredOrigin = False

    def createCommandLineDescription(self):
        """Declare the command line options of this tool."""
        self.commandline().addGroup("Events")
        self.commandline().addStringOption("Events", "begin",
                                           "Specify the lower bound of the "
                                           "time interval.")
        self.commandline().addStringOption("Events", "end",
                                           "Specify the upper bound of the "
                                           "time interval.")
        self.commandline().addStringOption("Events", "hours",
                                           "Start searching given hours before"
                                           " now. If set, --begin and --end "
                                           "are ignored.")
        self.commandline().addStringOption("Events", "modified-after",
                                           "Select events modified after the "
                                           "specified time.")

        self.commandline().addGroup("Output")
        # NOTE: fixed a stray ')' that previously terminated the help text.
        self.commandline().addStringOption("Output", "delimiter,D",
                                           "Specify the delimiter of the "
                                           "resulting event IDs. "
                                           "Default: '\\n'")
        self.commandline().addOption("Output", "preferred-origin,p",
                                     "Print the ID of the preferred origin "
                                     "along with the event ID.")
        return True

    def init(self):
        """Evaluate command line options and derive the search interval."""
        if not seiscomp.client.Application.init(self):
            return False

        try:
            self.hours = float(self.commandline().optionString("hours"))
        except RuntimeError:
            # --hours not given: fall back to --begin/--end below
            pass

        end = "2500-01-01T00:00:00Z"
        if self.hours is None:
            try:
                start = self.commandline().optionString("begin")
            except RuntimeError:
                start = "1900-01-01T00:00:00Z"

            self._startTime = _parseTime(start)
            if self._startTime is None:
                seiscomp.logging.error("Wrong 'begin' format '%s'" % start)
                return False
            seiscomp.logging.debug("Setting start to %s"
                                   % self._startTime.toString("%FT%TZ"))

            try:
                end = self.commandline().optionString("end")
            except RuntimeError:
                pass

            self._endTime = _parseTime(end)
            if self._endTime is None:
                seiscomp.logging.error("Wrong 'end' format '%s'" % end)
                return False
            seiscomp.logging.debug("Setting end to %s"
                                   % self._endTime.toString("%FT%TZ"))
        else:
            seiscomp.logging.debug("Time window set by hours option: ignoring "
                                   "all other time parameters")
            secs = self.hours*3600
            # presumably the limit of the underlying time span type
            # (596523 h) — TODO confirm against seiscomp.core.TimeSpan
            maxSecs = 596523 * 3600
            if secs > maxSecs:
                # NOTE: fixed typo "exceeeded" in the error message.
                seiscomp.logging.error("Maximum hours exceeded. Maximum is %i" % (maxSecs / 3600))
                return False

            self._startTime = seiscomp.core.Time.UTC() - seiscomp.core.TimeSpan(secs)
            self._endTime = _parseTime(end)

        try:
            self._delimiter = self.commandline().optionString("delimiter")
        except RuntimeError:
            self._delimiter = "\n"

        try:
            modifiedAfter = self.commandline().optionString("modified-after")
            self._modifiedAfterTime = _parseTime(modifiedAfter)
            if self._modifiedAfterTime is None:
                seiscomp.logging.error("Wrong 'modified-after' format '%s'"
                                       % modifiedAfter)
                return False
            seiscomp.logging.debug(
                "Setting 'modified-after' time to %s" %
                self._modifiedAfterTime.toString("%FT%TZ"))
        except RuntimeError:
            pass

        try:
            self._preferredOrigin = self.commandline().hasOption("preferred-origin")
        except RuntimeError:
            pass

        return True

    def printUsage(self):
        """Print usage information and examples."""
        print('''Usage:
  scevtls [options]

List event IDs available in a given time range and print to stdout.''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Print all event IDs from year 2022 and thereafter
  scevtls -d mysql://sysop:sysop@localhost/seiscomp --begin "2022-01-01 00:00:00"
''')

    def run(self):
        """Query events in the interval and print the matching IDs."""
        out = []
        seiscomp.logging.debug("Search interval: %s - %s" %
                               (self._startTime, self._endTime))
        for obj in self.query().getEvents(self._startTime, self._endTime):
            evt = seiscomp.datamodel.Event.Cast(obj)
            if not evt:
                continue

            if self._modifiedAfterTime is not None:
                try:
                    # unset optional attributes raise ValueError
                    if evt.creationInfo().modificationTime() < self._modifiedAfterTime:
                        continue
                except ValueError:
                    continue

            outputString = evt.publicID()
            if self._preferredOrigin:
                try:
                    outputString += " " + evt.preferredOriginID()
                except ValueError:
                    outputString += " none"

            out.append(outputString)

        sys.stdout.write("%s\n" % self._delimiter.join(out))

        return True
+
+
def main():
    # Build the application from the command line and execute it.
    app = EventList(len(sys.argv), sys.argv)
    app()


if __name__ == "__main__":
    main()
diff --git a/bin/scevtstreams b/bin/scevtstreams
new file mode 100755
index 0000000..61c4493
--- /dev/null
+++ b/bin/scevtstreams
@@ -0,0 +1,432 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+from seiscomp import client, core, datamodel, io
+
+
class EventStreams(client.Application):
    """Extract stream information and time windows from an event.

    Picks are collected either from the database or from an XML file and
    reduced to a set of stream lines sharing a common time window, printed
    in plain, capstool or FDSN dataselect POST format.
    """

    def __init__(self, argc, argv):
        client.Application.__init__(self, argc, argv)

        # Offline tool: database access only, no messaging, no daemon mode.
        self.setMessagingEnabled(False)
        self.setDatabaseEnabled(True, False)
        self.setDaemonEnabled(False)

        self.eventID = None
        self.inputFile = None
        self.inputFormat = "xml"
        # margin[0] is applied before the first, margin[-1] after the last pick
        self.margin = [300]

        self.allNetworks = True
        self.allStations = True
        self.allLocations = True
        self.allStreams = True
        self.allComponents = True

        # filter
        self.network = None
        self.station = None

        self.streams = []

        # output format
        self.caps = False
        self.fdsnws = False

    def createCommandLineDescription(self):
        """Declare the command line options of this tool."""
        self.commandline().addGroup("Input")
        self.commandline().addStringOption(
            "Input", "input,i",
            "read event from XML file instead of database. Use '-' to read "
            "from stdin.")
        self.commandline().addStringOption(
            "Input", "format,f",
            "input format to use (xml [default], zxml (zipped xml), binary). "
            "Only relevant with --input.")

        self.commandline().addGroup("Dump")
        self.commandline().addStringOption("Dump", "event,E", "event id")
        self.commandline().addStringOption(
            "Dump", "margin,m",
            "time margin around the picked time window, default is 300. Added "
            "before the first and after the last pick, respectively. Use 2 "
            "comma-separted values (before,after) for asymmetric margins, e.g. "
            "-m 120,300.")
        self.commandline().addStringOption(
            "Dump", "streams,S",
            "comma separated list of streams per station to add, e.g. BH,SH,HH")
        self.commandline().addOption(
            "Dump", "all-streams",
            "dump all streams. If unused, just streams with picks are dumped.")
        self.commandline().addIntOption(
            "Dump", "all-components,C",
            "all components or just the picked ones (0). Default is 1")
        self.commandline().addIntOption(
            "Dump", "all-locations,L",
            "all locations or just the picked ones (0). Default is 1")
        self.commandline().addOption(
            "Dump", "all-stations",
            "dump all stations from the same network. If unused, just stations "
            "with picks are dumped.")
        self.commandline().addOption(
            "Dump", "all-networks",
            "dump all networks. If unused, just networks with picks are dumped."
            " This option implies all-stations, all-locations, all-streams, "
            "all-components and will only provide the time window.")
        self.commandline().addOption(
            "Dump", "resolve-wildcards,R",
            "if all components are used, use inventory to resolve stream "
            "components instead of using '?' (important when Arclink should be "
            "used)")
        self.commandline().addStringOption(
            "Dump", "net-sta", "Filter streams by network code or network and "
            "station code. Format: NET or NET.STA")
        self.commandline().addOption(
            "Dump", "caps",
            "dump in capstool format (Common Acquisition Protocol Server by "
            "gempa GmbH)")
        self.commandline().addOption(
            "Dump", "fdsnws",
            "dump in FDSN dataselect webservice POST format")
        return True

    def validateParameters(self):
        """Check option combinations; disable the database for file input."""
        if not client.Application.validateParameters(self):
            return False

        if self.commandline().hasOption("resolve-wildcards"):
            self.setLoadStationsEnabled(True)

        # optionString() raises RuntimeError when the option is not set;
        # catching RuntimeError (not BaseException) keeps Ctrl+C working.
        try:
            self.inputFile = self.commandline().optionString("input")
            self.setDatabaseEnabled(False, False)
        except RuntimeError:
            pass

        return True

    def init(self):
        """Evaluate the command line options."""
        if not client.Application.init(self):
            return False

        try:
            self.inputFormat = self.commandline().optionString("format")
        except RuntimeError:
            pass

        try:
            self.eventID = self.commandline().optionString("event")
        except RuntimeError:
            if not self.inputFile:
                raise ValueError("An eventID is mandatory if no input file is "
                                 "specified")

        try:
            self.margin = self.commandline().optionString("margin").split(",")
        except RuntimeError:
            pass

        try:
            self.streams = self.commandline().optionString("streams").split(",")
        except RuntimeError:
            pass

        try:
            self.allComponents = self.commandline().optionInt("all-components") != 0
        except RuntimeError:
            pass

        try:
            self.allLocations = self.commandline().optionInt("all-locations") != 0
        except RuntimeError:
            pass

        self.allStreams = self.commandline().hasOption("all-streams")
        self.allStations = self.commandline().hasOption("all-stations")
        self.allNetworks = self.commandline().hasOption("all-networks")

        try:
            networkStation = self.commandline().optionString("net-sta")
        except RuntimeError:
            networkStation = None

        if networkStation:
            try:
                self.network = networkStation.split('.')[0]
            except IndexError:
                print("Error in network code '{}': Use '--net-sta' with "
                      "format NET or NET.STA".format(networkStation), file=sys.stderr)
                return False

            try:
                self.station = networkStation.split('.')[1]
            except IndexError:
                # station part is optional
                pass

        self.caps = self.commandline().hasOption("caps")
        self.fdsnws = self.commandline().hasOption("fdsnws")

        return True

    def printUsage(self):
        """Print usage information and examples."""
        print('''Usage:
  scevtstreams [options]

Extract stream information and time windows from an event''')

        client.Application.printUsage(self)

        print('''Examples:
Get the time windows for an event in the database:
  scevtstreams -E gfz2012abcd -d mysql://sysop:sysop@localhost/seiscomp

Create lists compatible with fdsnws:
  scevtstreams -E gfz2012abcd -i event.xml -m 120,500 --fdsnws
''')

    def run(self):
        """Collect picks, compute the time window and print stream lines."""
        resolveWildcards = self.commandline().hasOption("resolve-wildcards")

        picks = []

        # read picks from input file
        if self.inputFile:
            picks = self.readXML()
            if not picks:
                raise ValueError("Could not find picks in input file")

        # read picks from database
        else:
            for obj in self.query().getEventPicks(self.eventID):
                pick = datamodel.Pick.Cast(obj)
                if pick is None:
                    continue
                picks.append(pick)

            if not picks:
                raise ValueError("Could not find picks for event {} in "
                                 "database".format(self.eventID))

        # filter picks by network/station if requested
        pickFiltered = []
        if self.network:
            for pick in picks:
                if pick.waveformID().networkCode() != self.network:
                    continue
                if self.station and self.station != pick.waveformID().stationCode():
                    continue
                pickFiltered.append(pick)

            picks = pickFiltered

            if not picks:
                raise ValueError("All picks filtered out")

        # calculate minimum and maximum pick time
        minTime = None
        maxTime = None
        for pick in picks:
            if minTime is None or minTime > pick.time().value():
                minTime = pick.time().value()

            if maxTime is None or maxTime < pick.time().value():
                maxTime = pick.time().value()

        # add time margin(s), no need for None check since pick time is
        # mandatory and at least on pick exists
        minTime = minTime - core.TimeSpan(float(self.margin[0]))
        maxTime = maxTime + core.TimeSpan(float(self.margin[-1]))

        # convert times to string dependend on requested output format
        if self.caps:
            timeFMT = "%Y,%m,%d,%H,%M,%S"
        elif self.fdsnws:
            timeFMT = "%FT%T"
        else:
            timeFMT = "%F %T"
        minTime = minTime.toString(timeFMT)
        maxTime = maxTime.toString(timeFMT)

        inv = client.Inventory.Instance().inventory()

        lines = set()
        for pick in picks:
            net = pick.waveformID().networkCode()
            station = pick.waveformID().stationCode()
            loc = pick.waveformID().locationCode()
            streams = [pick.waveformID().channelCode()]
            rawStream = streams[0][:2]

            if self.allComponents:
                if resolveWildcards:
                    # expand components via the inventory
                    iloc = datamodel.getSensorLocation(inv, pick)
                    if iloc:
                        tc = datamodel.ThreeComponents()
                        datamodel.getThreeComponents(
                            tc, iloc, rawStream, pick.time().value())
                        streams = []
                        if tc.vertical():
                            streams.append(tc.vertical().code())
                        if tc.firstHorizontal():
                            streams.append(tc.firstHorizontal().code())
                        if tc.secondHorizontal():
                            streams.append(tc.secondHorizontal().code())
                else:
                    streams = [rawStream + "?"]

            if self.allLocations:
                loc = "*"

            if self.allStations:
                station = "*"

            if self.allNetworks:
                net = "*"
                station = "*"
                loc = "*"

            # FDSNWS requires empty location to be encoded by 2 dashes
            if not loc and self.fdsnws:
                loc = "--"

            # line format
            if self.caps:
                lineFMT = "{0} {1} {2} {3} {4} {5}"
            elif self.fdsnws:
                lineFMT = "{2} {3} {4} {5} {0} {1}"
            else:
                lineFMT = "{0};{1};{2}.{3}.{4}.{5}"

            for s in streams:
                if self.allStreams or self.allNetworks:
                    s = "*"

                lines.add(lineFMT.format(
                    minTime, maxTime, net, station, loc, s))

            for s in self.streams:
                if s == rawStream:
                    continue

                if self.allStreams or self.allNetworks:
                    s = "*"

                lines.add(lineFMT.format(
                    minTime, maxTime, net, station, loc, s + streams[0][2]))

        for line in sorted(lines):
            print(line, file=sys.stdout)

        return True

    def readXML(self):
        """Read picks of the selected (or first) event from the input file.

        Returns the list of picks referenced by the event's origins;
        raises TypeError/ValueError/IOError on malformed input.
        """
        if self.inputFormat == "xml":
            ar = io.XMLArchive()
        elif self.inputFormat == "zxml":
            ar = io.XMLArchive()
            ar.setCompression(True)
        elif self.inputFormat == "binary":
            ar = io.VBinaryArchive()
        else:
            raise TypeError("unknown input format '{}'".format(
                self.inputFormat))

        if not ar.open(self.inputFile):
            raise IOError("unable to open input file")

        obj = ar.readObject()
        if obj is None:
            raise TypeError("invalid input file format")

        ep = datamodel.EventParameters.Cast(obj)
        if ep is None:
            raise ValueError("no event parameters found in input file")

        # we require at least one origin which references to picks via arrivals
        if ep.originCount() == 0:
            raise ValueError("no origin found in input file")

        originIDs = []

        # search for a specific event id
        if self.eventID:
            ev = datamodel.Event.Find(self.eventID)
            if ev:
                originIDs = [ev.originReference(i).originID() \
                             for i in range(ev.originReferenceCount())]
            else:
                raise ValueError("event id {} not found in input file".format(
                    self.eventID))

        # use first event/origin if no id was specified
        else:
            # no event, use first available origin
            if ep.eventCount() == 0:
                if ep.originCount() > 1:
                    print("WARNING: Input file contains no event but more than "
                          "1 origin. Considering only first origin",
                          file=sys.stderr)
                originIDs.append(ep.origin(0).publicID())

            # use origin references of first available event
            else:
                if ep.eventCount() > 1:
                    print("WARNING: Input file contains more than 1 event. "
                          "Considering only first event", file=sys.stderr)
                ev = ep.event(0)
                originIDs = [ev.originReference(i).originID() \
                             for i in range(ev.originReferenceCount())]

        # collect pickIDs
        pickIDs = set()
        for oID in originIDs:
            o = datamodel.Origin.Find(oID)
            if o is None:
                continue

            for i in range(o.arrivalCount()):
                pickIDs.add(o.arrival(i).pickID())

        # lookup picks
        picks = []
        for pickID in pickIDs:
            pick = datamodel.Pick.Find(pickID)
            if pick:
                picks.append(pick)

        return picks
+
+
if __name__ == '__main__':
    try:
        app = EventStreams(len(sys.argv), sys.argv)
        sys.exit(app())
    except (ValueError, TypeError) as e:
        # Input/usage errors are reported as plain messages, not tracebacks.
        print("ERROR: {}".format(e), file=sys.stderr)
        sys.exit(1)
diff --git a/bin/scgitinit b/bin/scgitinit
new file mode 100755
index 0000000..6c25b47
--- /dev/null
+++ b/bin/scgitinit
@@ -0,0 +1,38 @@
#!/bin/bash
# Initializes a GIT repository in $SEISCOMP_ROOT and adds important
# configuration files from 'etc' and 'share' directory
#
# Author: Stephan Herrnkind


# search for SeisComP path
if [ x"$SEISCOMP_ROOT" = x ]; then
    echo "SEISCOMP_ROOT not set"
    exit 1
fi

# search git binary (command -v is POSIX, unlike which)
if ! command -v git > /dev/null; then
    echo "git binary not found"
    exit 2
fi

# quote the path: it may contain spaces
cd "$SEISCOMP_ROOT" || exit 3

# initialize git if necessary: either a .git directory exists or we are
# already inside a work tree
if [ -d .git ] || git rev-parse --git-dir > /dev/null 2>&1; then
    echo "GIT repository in $SEISCOMP_ROOT already initialized"
else
    git init || exit 4
fi

# add files
git add etc
find share -type f -regex \
    ".*\.\(bna\|cfg\|conf\|htaccess\|kml\|py\|sh\|tpl\|tvel\|txt\|xml\)" \
    -execdir git add {} +

echo "files added to GIT, use 'git status' to get an overview and " \
    "'git commit' to commit them"
diff --git a/bin/scheli b/bin/scheli
new file mode 100755
index 0000000..db78ecc
Binary files /dev/null and b/bin/scheli differ
diff --git a/bin/scimex b/bin/scimex
new file mode 100755
index 0000000..f4286f3
Binary files /dev/null and b/bin/scimex differ
diff --git a/bin/scimport b/bin/scimport
new file mode 100755
index 0000000..e386f0d
Binary files /dev/null and b/bin/scimport differ
diff --git a/bin/scinv b/bin/scinv
new file mode 100755
index 0000000..697cbe9
Binary files /dev/null and b/bin/scinv differ
diff --git a/bin/scm b/bin/scm
new file mode 100755
index 0000000..f142781
Binary files /dev/null and b/bin/scm differ
diff --git a/bin/scmag b/bin/scmag
new file mode 100755
index 0000000..44788e6
Binary files /dev/null and b/bin/scmag differ
diff --git a/bin/scmapcut b/bin/scmapcut
new file mode 100755
index 0000000..d18646f
Binary files /dev/null and b/bin/scmapcut differ
diff --git a/bin/scmaster b/bin/scmaster
new file mode 100755
index 0000000..e16b81c
Binary files /dev/null and b/bin/scmaster differ
diff --git a/bin/scml2inv b/bin/scml2inv
new file mode 100755
index 0000000..a315edf
--- /dev/null
+++ b/bin/scml2inv
@@ -0,0 +1,84 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import sys
+import getopt
+import seiscomp.io
+import seiscomp.datamodel
+
+
# Help text printed for -h/--help; written to stderr by main().
usage = """scml2inv [options] input output=stdout

Options:
 -h [ --help ] Produce help message
 -f Enable formatted XML output
"""
+
+
def main(argv):
    """Convert a SeisComP ML document to an inventory XML file.

    argv: the full argument vector (argv[0] is the program name).
    Returns 0 on success, a non-zero error code otherwise.
    """
    formatted = False

    # parse command line options
    try:
        opts, args = getopt.getopt(argv[1:], "hf", ["help"])
    except getopt.error as msg:
        sys.stderr.write("%s\n" % msg)
        sys.stderr.write("for help use --help\n")
        return 1

    for o, a in opts:
        if o in ["-h", "--help"]:
            sys.stderr.write("%s\n" % usage)
            return 1
        elif o in ["-f"]:
            formatted = True

    # remaining positional arguments: input [output]
    argv = args
    if len(argv) < 1:
        sys.stderr.write("Missing input file\n")
        return 1

    ar = seiscomp.io.XMLArchive()
    if not ar.open(argv[0]):
        sys.stderr.write("Unable to parse input file: %s\n" % argv[0])
        return 2

    obj = ar.readObject()
    ar.close()

    if obj is None:
        sys.stderr.write("Empty document in %s\n" % argv[0])
        return 3

    inv = seiscomp.datamodel.Inventory.Cast(obj)
    if inv is None:
        sys.stderr.write("No inventory found in %s\n" % argv[0])
        return 4

    # default output is stdout ('-')
    if len(argv) < 2:
        output_file = "-"
    else:
        output_file = argv[1]

    # the archive object is reused for writing after close()
    ar.create(output_file)
    ar.setFormattedOutput(formatted)
    ar.writeObject(inv)
    ar.close()

    return 0
+
+
if __name__ == "__main__":
    # Propagate main()'s return code as the process exit status.
    sys.exit(main(sys.argv))
diff --git a/bin/scmm b/bin/scmm
new file mode 100755
index 0000000..7ecef2e
Binary files /dev/null and b/bin/scmm differ
diff --git a/bin/scmssort b/bin/scmssort
new file mode 100755
index 0000000..26c5482
--- /dev/null
+++ b/bin/scmssort
@@ -0,0 +1,416 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+from __future__ import absolute_import, division, print_function
+
+import sys
+import os
+import re
+import argparse
+import seiscomp.core
+import seiscomp.io
+
+
class MyArgumentParser(argparse.ArgumentParser):
    # ArgumentParser variant that emits the epilog verbatim instead of
    # letting the help formatter re-wrap it.
    def format_epilog(self):
        """Return the epilog text unchanged."""
        return self.epilog
+
+
def str2time(timestring):
    """
    Liberally accept many time string formats and convert them to a
    seiscomp.core.Time
    """
    # Normalize all common separators to spaces, then split into fields.
    cleaned = timestring.strip()
    for sep in ("-", "/", ":", "T", "Z"):
        cleaned = cleaned.replace(sep, " ")
    fields = cleaned.split()

    # Require at least a date; pad missing H/M/S fields with zeros.
    assert 3 <= len(fields) <= 6
    fields += ["0"] * (6 - len(fields))
    normalized = " ".join(fields)

    timeFormat = "%Y %m %d %H %M %S"
    if "." in normalized:
        timeFormat += ".%f"

    result = seiscomp.core.Time()
    result.fromString(normalized, timeFormat)
    return result
+
+
def time2str(time):
    """
    Convert a seiscomp.core.Time to a string (millisecond precision)
    """
    text = time.toString("%Y-%m-%d %H:%M:%S.%f000000")
    return text[:23]
+
+
def recordInput(filename=None, datatype=seiscomp.core.Array.INT):
    """
    Simple Record iterator that reads from a file (to be specified by
    filename) or -- if no filename was specified -- reads from standard input

    Yields records until the stream is exhausted. Terminates the process
    via sys.exit() on a missing file or unreadable input.
    """

    stream = seiscomp.io.RecordStream.Create("file")
    if not stream:
        raise IOError("failed to create a RecordStream")

    # no filename or '-' means: read from stdin
    if not filename:
        filename = "-"

    if filename == "-":
        print(
            "Waiting for data input from stdin. Use Ctrl + C to interrupt.",
            file=sys.stderr,
        )
    else:
        if not os.path.exists(filename):
            print("Cannot find file {}".format(filename), file=sys.stderr)
            sys.exit()

    if not stream.setSource(filename):
        print(" + failed to assign source file to RecordStream", file=sys.stderr)
        sys.exit()

    # SAVE_RAW keeps the raw payload so records can be written back verbatim
    records = seiscomp.io.RecordInput(stream, datatype, seiscomp.core.Record.SAVE_RAW)

    while True:
        try:
            record = next(records)
        except Exception:
            print("Received invalid or no input", file=sys.stderr)
            sys.exit()

        if not record:
            return
        yield record
+
+
# Defaults: an effectively unbounded time window, input from stdin.
tmin = str2time("1970-01-01 00:00:00")
tmax = str2time("2500-01-01 00:00:00")
ifile = "-"

description = (
    "Read unsorted and possibly multiplexed miniSEED files. "
    "Sort data by time (multiplexing) and filter the individual "
    "records by time and/or streams. Apply this before playbacks "
    "and waveform archiving."
)

epilog = (
    "Examples:\n"
    "Read data from multiple files, extract streams by time, sort records by start "
    "time, remove duplicate records\n"
    " cat f1.mseed f2.mseed f3.mseed |\\\n"
    " scmssort -v -t '2007-03-28 15:48~2007-03-28 16:18' -u > sorted.mseed\n"
    "\n"
    "Extract streams by time, stream code and sort records by end time\n"
    " echo CX.PB01..BH? |\\ \n"
    " scmssort -v -E -t '2007-03-28 15:48~2007-03-28 16:18' -u -l - test.mseed > "
    "sorted.mseed"
)

p = MyArgumentParser(
    description=description,
    epilog=epilog,
    formatter_class=argparse.RawDescriptionHelpFormatter,
)
p.add_argument(
    "-E",
    "--sort-by-end-time",
    action="store_true",
    help="Sort according to record end time; default is start time.",
)
p.add_argument(
    "-r",
    "--rm",
    action="store_true",
    help="Remove all traces in stream list given by --list instead of keeping them.",
)
p.add_argument(
    "-l",
    "--list",
    action="store",
    help="File with stream list to filter the records. "
    "One stream per line. Instead of a file read the from stdin (-). "
    "Line format: NET.STA.LOC.CHA - wildcards and regular expressions "
    "are considered. Example: CX.*..BH?.",
)
p.add_argument(
    "-t",
    "--time-window",
    action="store",
    help="Specify time window (as one -properly quoted- string). Times "
    "are of course UTC and separated by a tilde '~'.",
)
p.add_argument(
    "-u",
    "--uniqueness",
    action="store_true",
    help="Ensure uniqueness of output, i.e. skip duplicate records.",
)
p.add_argument("-v", "--verbose", action="store_true", help="Run in verbose mode.")

p.add_argument(
    "filenames",
    # nargs="*" (not "+") so the tool can be fed via stdin with no file
    # arguments, as shown in the examples; the later
    # 'if not filenames: filenames = ["-"]' fallback relies on this.
    nargs="*",
    help="Names of input files in miniSEED format.",
)
opt = p.parse_args()
filenames = opt.filenames

# Optional time window: 'start~end'
if opt.time_window:
    tmin, tmax = list(map(str2time, opt.time_window.split("~")))

if opt.verbose:
    print(
        "Considered time window: %s~%s" % (time2str(tmin), time2str(tmax)),
        file=sys.stderr,
    )

# Optional stream filter list, possibly inverted with -r/--rm
listFile = None
removeStreams = False
if opt.list:
    listFile = opt.list
    print("Considered stream list from: %s" % (listFile), file=sys.stderr)

    if opt.rm:
        removeStreams = True
        print("Removing listed streams", file=sys.stderr)
+
+
def _time(record):
    """Return the record's sort-key time.

    Uses the record end time when -E/--sort-by-end-time was given, the
    start time otherwise, wrapped in a new seiscomp.core.Time instance.
    """
    picked = record.endTime() if opt.sort_by_end_time else record.startTime()
    return seiscomp.core.Time(picked)
+
+
+def _in_time_window(record, tMin, tMax):
+ return record.endTime() >= tMin and record.startTime() <= tMax
+
+
def readStreamList(file):
    """Read a stream list (one NET.STA.LOC.CHA pattern per line) from *file*.

    Reads from stdin if *file* is '-'. Comment lines starting with '#' and
    empty lines are ignored. Returns the list of patterns, or an empty list
    on error (unreadable file or malformed line).

    Note: this function previously mixed up the 'file' parameter with the
    module-level 'listFile' variable; it now uses the parameter throughout.
    """
    streamList = []

    try:
        if file == "-":
            f = sys.stdin
            file = "stdin"
        else:
            f = open(file, "r", encoding="utf-8")
    except OSError:
        print("%s: error: unable to open" % file, file=sys.stderr)
        return []

    lineNumber = 0
    for line in f:
        lineNumber += 1  # 1-based line numbers in error messages
        line = line.strip()

        # ignore comments and empty lines
        if not line or line.startswith("#"):
            continue

        toks = line.split(".")
        if len(toks) != 4:
            if f is not sys.stdin:
                f.close()
            print(
                "error: %s in line %d has invalid line format, expected "
                "stream list: NET.STA.LOC.CHA - 1 line per stream including "
                "regular expressions" % (file, lineNumber),
                file=sys.stderr,
            )
            return []

        streamList.append(line)

    # do not close stdin - it does not belong to us
    if f is not sys.stdin:
        f.close()

    return streamList
+
+
# Fall back to reading from stdin when no file names were given.
if not filenames:
    filenames = ["-"]

streams = None
if listFile:
    streams = readStreamList(listFile)
    if not streams and not removeStreams:
        print(" + cannot extract data", file=sys.stderr)
        sys.exit()

    if opt.verbose:
        string = " + streams: "

        for stream in streams:
            string += stream + " "
        print("%s" % (string), file=sys.stderr)

    # one combined regular expression matching any listed stream
    pattern = re.compile("|".join(streams))

# statistics collected only in verbose mode
readRecords = 0
networks = set()
stations = set()
locations = set()
channels = set()
readStreams = set()
outEnd = None
outStart = None

if filenames:
    first = None
    # (sort key, raw record payload) pairs for all accepted records
    time_raw = []
    for fileName in filenames:
        if opt.verbose:
            print("Reading file '%s'" % fileName, file=sys.stderr)

        for rec in recordInput(fileName):
            if not rec:
                continue

            if not _in_time_window(rec, tmin, tmax):
                continue

            raw = rec.raw().str()
            streamCode = "%s.%s.%s.%s" % (
                rec.networkCode(),
                rec.stationCode(),
                rec.locationCode(),
                rec.channelCode(),
            )

            # apply the stream list filter (possibly inverted by --rm)
            if listFile:
                foundStream = False

                if pattern.match(streamCode):
                    foundStream = True

                if removeStreams:
                    foundStream = not foundStream

                if not foundStream:
                    continue

            # collect statistics for verbosity mode
            if opt.verbose:
                networks.add(rec.networkCode())
                stations.add(rec.stationCode())
                locations.add(rec.locationCode())
                channels.add(rec.channelCode())
                readStreams.add(streamCode)
                readRecords += 1

                start = rec.startTime()
                end = rec.endTime()

                if (outStart is None) or (start < outStart):
                    outStart = seiscomp.core.Time(start)

                if (outEnd is None) or (end > outEnd):
                    outEnd = seiscomp.core.Time(end)

            # store times relative to the first record to save memory
            t = _time(rec)
            if first is None:
                first = t
            t = float(t - first)  # float needs less memory
            time_raw.append((t, raw))

    if opt.verbose:
        print(
            " + %d networks, %d stations, %d sensor locations, "
            "%d channel codes, %d streams, %d records"
            % (
                len(networks),
                len(stations),
                len(locations),
                len(channels),
                len(readStreams),
                readRecords,
            ),
            file=sys.stderr,
        )
        print("Sorting records", file=sys.stderr)
    time_raw.sort()

    if opt.verbose:
        print("Writing output", file=sys.stderr)
    previous = None

    # write binary data: use the underlying buffer where available
    out = sys.stdout
    try:
        # needed in Python 3, fails in Python 2
        out = out.buffer
    except AttributeError:
        # assuming this is Python 2, nothing to be done
        pass

    # after sorting, duplicates are adjacent; count and optionally skip them
    duplicates = 0
    for item in time_raw:
        if item == previous:
            duplicates += 1
            if opt.uniqueness:
                continue

        t, raw = item
        out.write(raw)

        previous = item

    if opt.verbose:
        print("Finished", file=sys.stderr)
        if opt.uniqueness:
            print(
                " + found and removed {} duplicate records".format(duplicates),
                file=sys.stderr,
            )
        else:
            if duplicates > 0:
                print(
                    " + found {} duplicate records - remove with: scmssort -u".format(
                        duplicates
                    ),
                    file=sys.stderr,
                )
            else:
                print(" + found 0 duplicate records", file=sys.stderr)

        print("Output:", file=sys.stderr)
        if outStart and outEnd:
            print(
                " + time window: %s~%s"
                % (seiscomp.core.Time(outStart), seiscomp.core.Time(outEnd)),
                file=sys.stderr,
            )
        else:
            print("No data found in time window", file=sys.stderr)

    else:
        # This is an important hint which should always be printed
        if duplicates > 0 and not opt.uniqueness:
            print(
                "Found {} duplicate records - remove with: scmssort -u".format(
                    duplicates
                ),
                file=sys.stderr,
            )
diff --git a/bin/scmv b/bin/scmv
new file mode 100755
index 0000000..d3079d5
Binary files /dev/null and b/bin/scmv differ
diff --git a/bin/scolv b/bin/scolv
new file mode 100755
index 0000000..940bf3f
Binary files /dev/null and b/bin/scolv differ
diff --git a/bin/scorg2nll b/bin/scorg2nll
new file mode 100755
index 0000000..66a72aa
Binary files /dev/null and b/bin/scorg2nll differ
diff --git a/bin/scorgls b/bin/scorgls
new file mode 100755
index 0000000..9ddc67f
--- /dev/null
+++ b/bin/scorgls
@@ -0,0 +1,131 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import sys
+import seiscomp.core
+import seiscomp.client
+import seiscomp.datamodel
+
+
class OriginList(seiscomp.client.Application):
    """List the publicIDs of all origins within a given time range.

    Database-only SeisComP application: queries the Origin table and prints
    the matching origin publicIDs to stdout, joined by a configurable
    delimiter.
    """

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        # Read-only database tool: no messaging, no daemon mode.
        self.setMessagingEnabled(False)
        self.setDatabaseEnabled(True, False)
        self.setDaemonEnabled(False)

        # Defaults: epoch (1970) .. now (UTC); delimiter resolved in init().
        self._startTime = seiscomp.core.Time()
        self._endTime = seiscomp.core.Time.GMT()
        self._delimiter = None

    def createCommandLineDescription(self):
        """Register the 'Origins' and 'Output' command-line option groups."""
        self.commandline().addGroup("Origins")
        self.commandline().addStringOption("Origins", "begin",
            "The lower bound of the time interval. Format: '1970-01-01 00:00:00'.")
        self.commandline().addStringOption("Origins", "end",
            "The upper bound of the time interval. Format: '1970-01-01 00:00:00'.")
        self.commandline().addStringOption("Origins", "author",
            "The author of the origins.")

        self.commandline().addGroup("Output")
        self.commandline().addStringOption("Output", "delimiter,D",
            "The delimiter of the resulting "
            "origin IDs. Default: '\\n')")
        return True

    def init(self):
        """Parse the command-line options.

        optionString() raises RuntimeError when an option was not given;
        in that case the defaults set in __init__ are kept.
        """
        if not seiscomp.client.Application.init(self):
            return False

        try:
            start = self.commandline().optionString("begin")
            if not self._startTime.fromString(start, "%F %T"):
                print("Wrong 'begin' given -> assuming {}"
                      .format(self._startTime), file=sys.stderr)
        except RuntimeError:
            print("No 'begin' given -> assuming {}".format(self._startTime),
                  file=sys.stderr)

        try:
            end = self.commandline().optionString("end")
            if not self._endTime.fromString(end, "%F %T"):
                print("Wrong 'end' given -> assuming {}"
                      .format(self._endTime), file=sys.stderr)
        except RuntimeError:
            print("No 'end' given -> assuming {}".format(self._endTime),
                  file=sys.stderr)

        try:
            # NOTE(review): self.author is created here rather than in
            # __init__; run() therefore requires a successful init() first.
            self.author = self.commandline().optionString("author")
            sys.stderr.write("%s author used for output\n" % (self.author))
        except RuntimeError:
            self.author = False

        try:
            self._delimiter = self.commandline().optionString("delimiter")
        except RuntimeError:
            self._delimiter = "\n"

# sys.stderr.write("Setting end to %s\n" % self._endTime.toString("%F %T"))

        return True

    def printUsage(self):
        """Print tool usage, inherited options and an example invocation."""

        print('''Usage:
 scorgls [options]

List origin IDs available in a given time range and print to stdout.''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Print all origin IDs from year 2022 and thereafter
 scorgls -d mysql://sysop:sysop@localhost/seiscomp --begin "2022-01-01 00:00:00"
''')

    def run(self):
        """Execute the query and print the delimiter-joined origin IDs."""
        # NOTE(review): seiscomp.logging is not imported at the top of this
        # file -- presumably exposed via the seiscomp.client import; confirm.
        seiscomp.logging.debug("Search interval: %s - %s" %
                               (self._startTime, self._endTime))
        out = []
        # Select publicID plus all Origin columns for origins with
        # start <= time_value < end.
        # NOTE(review): SQL is assembled by string interpolation; the time
        # values come from the database's own formatter and the author is
        # passed through query().toString(), but this is still string-built
        # SQL -- confirm inputs are trusted.
        q = "select PublicObject.%s, Origin.* from Origin, PublicObject where Origin._oid=PublicObject._oid and Origin.%s >= '%s' and Origin.%s < '%s'" %\
            (self.database().convertColumnName("publicID"),
             self.database().convertColumnName("time_value"),
             self.database().timeToString(self._startTime),
             self.database().convertColumnName("time_value"),
             self.database().timeToString(self._endTime))

        if self.author:
            q += " and Origin.%s = '%s' " %\
                (self.database().convertColumnName("creationInfo_author"),
                 self.query().toString(self.author))

        for obj in self.query().getObjectIterator(q, seiscomp.datamodel.Origin.TypeInfo()):
            org = seiscomp.datamodel.Origin.Cast(obj)
            if org:
                out.append(org.publicID())

        # print() appends its own newline on top of the formatted one.
        print("{}\n".format(self._delimiter.join(out)), file=sys.stdout)
        return True
+
+
def main():
    """Construct the OriginList application and execute it."""
    OriginList(len(sys.argv), sys.argv)()


if __name__ == "__main__":
    main()
diff --git a/bin/scplot b/bin/scplot
new file mode 100755
index 0000000..4bb1c83
Binary files /dev/null and b/bin/scplot differ
diff --git a/bin/scproclat b/bin/scproclat
new file mode 100755
index 0000000..908c0a5
--- /dev/null
+++ b/bin/scproclat
@@ -0,0 +1,328 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import time, sys, os, traceback
+import seiscomp.core, seiscomp.client, seiscomp.datamodel
+import seiscomp.logging, seiscomp.system
+
+
def createDirectory(dir):
    """Ensure that directory *dir* exists and is writable.

    Returns True if the directory is already writable or could be created,
    False otherwise.  (NOTE: the parameter name shadows the builtin 'dir';
    kept for interface compatibility.)
    """
    if os.access(dir, os.W_OK):
        return True

    try:
        os.makedirs(dir)
        return True
    except OSError:
        # Creation failed (permissions, existing non-writable path, a file
        # in the way, ...).  Previously a bare 'except:' which would also
        # swallow KeyboardInterrupt/SystemExit.
        return False
+
+
def timeToString(t):
    """Format a seiscomp.core.Time as HH:MM:SS.ffffff (microsecond precision)."""
    return t.toString("%T.%6f")
+
+
def timeSpanToString(ts):
    """Render a time span as [-]DD:HH:MM:SS.ffffff.

    *ts* must provide seconds() and microseconds().  The sign is negative if
    either component is negative; both components are formatted by absolute
    value, so the microseconds are not folded into the seconds.
    """
    neg = ts.seconds() < 0 or ts.microseconds() < 0
    secs = abs(ts.seconds())
    usecs = abs(ts.microseconds())

    # Use integer division (the original Python-2 code used "/", which under
    # Python 3 produces floats that only happen to survive %d formatting).
    days, daySecs = divmod(secs, 86400)
    hours, hourSecs = divmod(daySecs, 3600)
    mins, secs = divmod(hourSecs, 60)

    sign = "-" if neg else ""
    return "%s%.2d:%.2d:%.2d:%.2d.%06d" % (sign, days, hours, mins, secs, usecs)
+
+
class ProcLatency(seiscomp.client.Application):
    """Listen to pick/amplitude/origin/magnitude/event messages and log
    processing latencies, both to stdout and to per-day log files below the
    configured storage directory.
    """

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        # Messaging-only application; notifier messages are interpreted and
        # dispatched to addObject()/updateObject() automatically.
        self.setMessagingEnabled(True)
        self.setDatabaseEnabled(False, False)

        self.setAutoApplyNotifierEnabled(False)
        self.setInterpretNotifierEnabled(True)

        self.addMessagingSubscription("PICK")
        self.addMessagingSubscription("AMPLITUDE")
        self.addMessagingSubscription("LOCATION")
        self.addMessagingSubscription("MAGNITUDE")
        self.addMessagingSubscription("EVENT")

        self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)

        # Storage root plus caches of the most recently used per-day
        # directories (avoids re-checking the filesystem for every entry).
        self._directory = ""
        self._nowDirectory = ""
        self._triggeredDirectory = ""
        self._logCreated = False

    def createCommandLineDescription(self):
        """Register the 'Storage' option group (output directory)."""
        try:
            self.commandline().addGroup("Storage")
            self.commandline().addStringOption(
                "Storage", "directory,o", "Specify the storage directory")
        except:
            seiscomp.logging.warning(
                "caught unexpected error %s" % sys.exc_info())

    def initConfiguration(self):
        """Read 'directory' and 'logMsgLatency' from the module configuration."""
        if not seiscomp.client.Application.initConfiguration(self):
            return False

        try:
            self._directory = self.configGetString("directory")
        except:
            pass

        try:
            self._logCreated = self.configGetBool("logMsgLatency")
        except:
            pass

        return True

    def init(self):
        """Resolve the storage directory; the command line overrides config."""
        if not seiscomp.client.Application.init(self):
            return False

        try:
            self._directory = self.commandline().optionString("directory")
        except:
            pass

        # Ensure a trailing slash; the IndexError for "" is swallowed on
        # purpose (empty directory means "no file logging").
        try:
            if self._directory[-1] != "/":
                self._directory = self._directory + "/"
        except:
            pass

        if self._directory:
            self._directory = seiscomp.system.Environment.Instance().absolutePath(self._directory)
            sys.stderr.write("Logging latencies to %s\n" % self._directory)

        return True

    def addObject(self, parentID, obj):
        """Notifier callback for newly added objects."""
        try:
            self.logObject(parentID, obj, False)
        except:
            sys.stderr.write("%s\n" % traceback.format_exc())

    def updateObject(self, parentID, obj):
        """Notifier callback for updated objects."""
        try:
            self.logObject("", obj, True)
        except:
            sys.stderr.write("%s\n" % traceback.format_exc())

    def logObject(self, parentID, obj, update):
        """Dispatch on the concrete type of *obj* and write one log entry."""
        now = seiscomp.core.Time.GMT()
        time = None  # NOTE(review): unused; also shadows the imported module

        pick = seiscomp.datamodel.Pick.Cast(obj)
        if pick:
            phase = ""
            try:
                phase = pick.phaseHint().code()
            except:
                pass

            created = None
            if self._logCreated:
                try:
                    created = pick.creationInfo().creationTime()
                except:
                    pass

            self.logStation(now, created, pick.time().value(
            ), pick.publicID() + ";P;" + phase, pick.waveformID(), update)
            return

        amp = seiscomp.datamodel.Amplitude.Cast(obj)
        if amp:
            created = None
            if self._logCreated:
                try:
                    created = amp.creationInfo().creationTime()
                except:
                    pass

            try:
                self.logStation(now, created, amp.timeWindow().reference(), amp.publicID(
                ) + ";A;" + amp.type() + ";" + "%.2f" % amp.amplitude().value(), amp.waveformID(), update)
            except:
                pass
            return

        org = seiscomp.datamodel.Origin.Cast(obj)
        if org:
            status = ""
            lat = "%.2f" % org.latitude().value()
            lon = "%.2f" % org.longitude().value()
            try:
                depth = "%d" % org.depth().value()
            except:
                # NOTE(review): if depth is unset, 'depth' stays unbound and
                # the logFile() call below raises NameError (silently caught
                # by addObject/updateObject); should default depth = "" here.
                pass

            try:
                status = seiscomp.datamodel.EOriginStatusNames.name(
                    org.status())
            except:
                pass

            self.logFile(now, org.time().value(), org.publicID(
            ) + ";O;" + status + ";" + lat + ";" + lon + ";" + depth, update)
            return

        mag = seiscomp.datamodel.Magnitude.Cast(obj)
        if mag:
            count = ""
            try:
                count = "%d" % mag.stationCount()
            except:
                pass
            self.logFile(now, None, mag.publicID() + ";M;" + mag.type() +
                         ";" + "%.4f" % mag.magnitude().value() + ";" + count, update)
            return

        orgref = seiscomp.datamodel.OriginReference.Cast(obj)
        if orgref:
            self.logFile(now, None, parentID + ";OR;" +
                         orgref.originID(), update)
            return

        evt = seiscomp.datamodel.Event.Cast(obj)
        if evt:
            self.logFile(now, None, evt.publicID(
            ) + ";E;" + evt.preferredOriginID() + ";" + evt.preferredMagnitudeID(), update)
            return

    def logStation(self, received, created, triggered, text, waveformID, update):
        """Write a latency entry for a station-bound object (pick/amplitude).

        received -- wall-clock arrival time of the message
        created  -- optional creation time (may be None)
        triggered -- data time the entry refers to
        text     -- preformatted payload appended to the entry
        """
        streamID = waveformID.networkCode() + "." + waveformID.stationCode() + "." + \
            waveformID.locationCode() + "." + waveformID.channelCode()

        aNow = received.get()
        aTriggered = triggered.get()

        # Per-day directories derived from fields 1..3 of Time.get()
        # (presumably year/month/day -- confirm against the Time API).
        nowDirectory = self._directory + \
            "/".join(["%.2d" % i for i in aNow[1:4]]) + "/"
        triggeredDirectory = self._directory + \
            "/".join(["%.2d" % i for i in aTriggered[1:4]]) + "/"

        # Entry format: <recv-trig>;[<recv-created>];A|U;<text>
        logEntry = timeSpanToString(received - triggered) + ";"
        if created is not None:
            logEntry = logEntry + timeSpanToString(received - created) + ";"
        else:
            logEntry = logEntry + ";"

        if update:
            logEntry = logEntry + "U"
        else:
            logEntry = logEntry + "A"

        logEntry = logEntry + ";" + text

        sys.stdout.write("%s;%s\n" % (timeToString(received), logEntry))

        if nowDirectory != self._nowDirectory:
            if createDirectory(nowDirectory) == False:
                seiscomp.logging.error(
                    "Unable to create directory %s" % nowDirectory)
                return False

            self._nowDirectory = nowDirectory

        self.writeLog(self._nowDirectory + streamID + ".rcv",
                      timeToString(received) + ";" + logEntry)

        if triggeredDirectory != self._triggeredDirectory:
            if createDirectory(triggeredDirectory) == False:
                seiscomp.logging.error(
                    "Unable to create directory %s" % triggeredDirectory)
                return False

            self._triggeredDirectory = triggeredDirectory

        self.writeLog(self._triggeredDirectory + streamID +
                      ".trg", timeToString(triggered) + ";" + logEntry)

        return True

    def logFile(self, received, triggered, text, update):
        """Write a latency entry for non-station objects (origin, magnitude,
        origin reference, event); *triggered* may be None."""
        aNow = received.get()
        nowDirectory = self._directory + \
            "/".join(["%.2d" % i for i in aNow[1:4]]) + "/"
        triggeredDirectory = None

        #logEntry = timeToString(received)
        logEntry = ""

        if not triggered is None:
            aTriggered = triggered.get()
            triggeredDirectory = self._directory + \
                "/".join(["%.2d" % i for i in aTriggered[1:4]]) + "/"

            logEntry = logEntry + timeSpanToString(received - triggered)

        logEntry = logEntry + ";"

        if update:
            logEntry = logEntry + "U"
        else:
            logEntry = logEntry + "A"

        logEntry = logEntry + ";" + text

        sys.stdout.write("%s;%s\n" % (timeToString(received), logEntry))

        if nowDirectory != self._nowDirectory:
            if createDirectory(nowDirectory) == False:
                seiscomp.logging.error(
                    "Unable to create directory %s" % nowDirectory)
                return False

            self._nowDirectory = nowDirectory

        self.writeLog(self._nowDirectory + "objects.rcv",
                      timeToString(received) + ";" + logEntry)

        if triggeredDirectory:
            if triggeredDirectory != self._triggeredDirectory:
                if createDirectory(triggeredDirectory) == False:
                    seiscomp.logging.error(
                        "Unable to create directory %s" % triggeredDirectory)
                    return False

                self._triggeredDirectory = triggeredDirectory

            self.writeLog(self._triggeredDirectory + "objects.trg",
                          timeToString(triggered) + ";" + logEntry)

        return True

    def writeLog(self, file, text):
        """Append *text* plus a newline to *file*.

        NOTE(review): the parameter name shadows the builtin 'file'."""
        of = open(file, "a")
        if of:
            of.write(text)
            of.write("\n")
            of.close()
+
+
# Instantiate the application and exit with its return code.
app = ProcLatency(len(sys.argv), sys.argv)
sys.exit(app())
diff --git a/bin/scqc b/bin/scqc
new file mode 100755
index 0000000..9c25211
Binary files /dev/null and b/bin/scqc differ
diff --git a/bin/scqcv b/bin/scqcv
new file mode 100755
index 0000000..13f7bb3
Binary files /dev/null and b/bin/scqcv differ
diff --git a/bin/scquery b/bin/scquery
new file mode 100755
index 0000000..7c84319
Binary files /dev/null and b/bin/scquery differ
diff --git a/bin/scqueryqc b/bin/scqueryqc
new file mode 100755
index 0000000..568449b
--- /dev/null
+++ b/bin/scqueryqc
@@ -0,0 +1,252 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) 2021 by gempa GmbH #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+# #
+# adopted from scqcquery #
+# Author: Dirk Roessler, gempa GmbH #
+# Email: roessler@gempa.de #
+############################################################################
+
+from __future__ import absolute_import, division, print_function
+
+import sys
+import re
+import seiscomp.core
+import seiscomp.client
+import seiscomp.io
+import seiscomp.datamodel
+
# Default set of QC parameters queried when --parameter is not given.
# Multi-word parameter names are quoted exactly as stored in the database.
qcParamsDefault = "latency,delay,timing,offset,rms,availability,"\
    "'gaps count','gaps interval','gaps length',"\
    "'overlaps count','overlaps interval','overlaps length',"\
    "'spikes count','spikes interval','spikes amplitude'"
+
+
def getStreamsFromInventory(self):
    """Collect all stream IDs (NET.STA.LOC.CHA) from the inventory database.

    *self* is the calling WfqQuery application; its database connection and
    query interval (_start/_end) are used.

    Returns a list of unique stream IDs, or False if reading the inventory
    failed.  NOTE(review): errors are signalled only by the False return --
    no exception is raised, so callers' 'except RuntimeError' never fires.
    """
    try:
        dbr = seiscomp.datamodel.DatabaseReader(self.database())
        inv = seiscomp.datamodel.Inventory()
        dbr.loadNetworks(inv)

        streamList = set()
        for inet in range(inv.networkCount()):
            network = inv.network(inet)
            dbr.load(network)
            for ista in range(network.stationCount()):
                station = network.station(ista)
                try:
                    start = station.start()
                except Exception:
                    continue
                try:
                    end = station.end()
                    # NOTE(review): _start/_end are strings while start/end
                    # are time objects -- this comparison presumably raises
                    # and is swallowed below, making the epoch filter a
                    # no-op; confirm the intended semantics.
                    if not start <= self._end <= end and end >= self._start:
                        continue
                except Exception:
                    pass

                for iloc in range(station.sensorLocationCount()):
                    location = station.sensorLocation(iloc)
                    for istr in range(location.streamCount()):
                        stream = location.stream(istr)
                        streamID = network.code() + "." + station.code() \
                            + "." + location.code() + "." + stream.code()
                        streamList.add(streamID)

        return list(streamList)

    except Exception:
        return False
+
+
class WfqQuery(seiscomp.client.Application):
    """Query a SeisComP database for waveform quality control (QC) parameters
    and write the result as a QualityControl XML document."""

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        # Database-only tool: no messaging, log to stderr, no daemon mode.
        self.setMessagingEnabled(False)
        self.setDatabaseEnabled(True, False)
        self.setLoggingToStdErr(True)
        self.setDaemonEnabled(False)

        # Option defaults; overwritten in validateParameters().
        self._streams = False
        self._fromInventory = False
        self._outfile = '-'
        self._parameter = qcParamsDefault
        # NOTE(review): default begin time uses ISO 'T'/'Z' notation while
        # run() parses with "%Y-%m-%d %H:%M:%S" -- confirm that
        # seiscomp.core.Time.FromString accepts this default.
        self._start = "1900-01-01T00:00:00Z"
        self._end = str(seiscomp.core.Time.GMT())
        self._formatted = False

    def createCommandLineDescription(self):
        """Register the 'Output' and 'Query' command-line option groups."""
        self.commandline().addGroup("Output")
        self.commandline().addStringOption("Output", "output,o",
            "output file name for XML. Writes "
            "to stdout if not given.")
        self.commandline().addOption("Output", "formatted,f",
            "write formatted XML")

        self.commandline().addGroup("Query")
        self.commandline().addStringOption(
            "Query", "begin,b", "Begin time of query: 'YYYY-MM-DD hh:mm:ss'")
        self.commandline().addStringOption(
            "Query", "end,e", "End time of query: 'YYYY-MM-DD hh:mm:ss'")
        self.commandline().addStringOption(
            "Query", "stream-id,i",
            "Waveform stream ID to search for QC parameters: net.sta.loc.cha -"
            " [networkCode].[stationCode].[sensorLocationCode].[channelCode]. "
            "Provide a single ID or a comma-separated list. Overrides "
            "--streams-from-inventory")
        self.commandline().addStringOption(
            "Query", "parameter,p",
            "QC parameter to output: (e.g. delay, rms, 'gaps count' ...). "
            "Provide a single parameter or a comma-separated list. Defaults "
            "apply if parameter is not given.")
        self.commandline().addOption("Query", "streams-from-inventory",
            "Read streams from inventory. Superseded"
            " by stream-id.")


        return True

    def printUsage(self):
        """Print tool usage, inherited options, defaults and an example."""
        print('''Usage:
 scqueryqc [options]

Query a database for waveform quality control (QC) parameters.''', file=sys.stderr)

        seiscomp.client.Application.printUsage(self)

        print('''Default QC parameters: {}
 '''.format(qcParamsDefault), file=sys.stderr)
        print('''Examples:
Query rms and delay values for streams 'AU.AS18..SHZ' and 'AU.AS19..SHZ' from '2021-11-20 00:00:00' until current
 scqueryqc -d localhost -b '2021-11-20 00:00:00' -p rms,delay -i AU.AS18..SHZ,AU.AS19..SHZ
 ''', file=sys.stderr)

    def validateParameters(self):
        """Resolve all options; require a stream source to be configured."""
        if not seiscomp.client.Application.validateParameters(self):
            return False

        try:
            self._streams = self.commandline().optionString("stream-id").split(",")
        except RuntimeError:
            pass

        try:
            self._fromInventory = self.commandline().hasOption("streams-from-inventory")
        except RuntimeError:
            pass

        if not self._streams and not self._fromInventory:
            print("Provide streamID(s): --stream-id or --streams-from-inventory",
                  file=sys.stderr)
            return False

        try:
            self._outfile = self.commandline().optionString("output")
        except RuntimeError:
            print("No output file name given: Sending to stdout",
                  file=sys.stderr)

        try:
            self._start = self.commandline().optionString("begin")
        except RuntimeError:
            print("No begin time given, considering: {}".format(self._start),
                  file=sys.stderr)

        try:
            self._end = self.commandline().optionString("end")
        except RuntimeError:
            print("No end time given, considering 'now': {}".format(self._end),
                  file=sys.stderr)

        try:
            self._parameter = self.commandline().optionString("parameter")
        except RuntimeError:
            print("No QC parameter given, using default", file=sys.stderr)

        try:
            self._formatted = self.commandline().hasOption("formatted")
        except RuntimeError:
            pass

        return True

    def run(self):
        """Fetch WaveformQuality objects per parameter/stream and dump XML."""
        if not self.query():
            print("No database connection!\n", file=sys.stderr)
            return False

        streams = self._streams
        if not streams and self._fromInventory:
            try:
                streams = getStreamsFromInventory(self)
            except RuntimeError:
                # NOTE(review): getStreamsFromInventory() returns False on
                # failure instead of raising, so this branch is dead code;
                # the 'if not streams' check below does the real handling.
                print("No streams read from database!\n", file=sys.stderr)
                return False

        if not streams:
            print("Empty stream list")
            return False

        # Wildcards cannot be passed through to the waveform-quality query.
        for stream in streams:
            if re.search("[*?]", stream):
                print("Wildcards in streamID are not supported: {}\n"
                      .format(stream), file=sys.stderr)
                return False

        print("Request:", file=sys.stderr)
        print(" streams: {}".format(str(streams)), file=sys.stderr)
        print(" number of streams: {}".format(len(streams)), file=sys.stderr)
        print(" begin time: {}".format(str(self._start)), file=sys.stderr)
        print(" end time: {}".format(str(self._end)), file=sys.stderr)
        print(" parameters: {}".format(str(self._parameter)),
              file=sys.stderr)
        print("Output:", file=sys.stderr)
        print(" file: {}".format(self._outfile), file=sys.stderr)
        print(" formatted XML: {}".format(self._formatted), file=sys.stderr)

        # create archive
        xarc = seiscomp.io.XMLArchive()
        if not xarc.create(self._outfile, True, True):
            print("Unable to write XML to {}!\n".format(self._outfile),
                  file=sys.stderr)
            return False
        xarc.setFormattedOutput(self._formatted)
        qc = seiscomp.datamodel.QualityControl()

        # write parameters
        for parameter in self._parameter.split(","):
            for stream in streams:
                (net, sta, loc, cha) = stream.split(".")
                it = self.query().getWaveformQuality(seiscomp.datamodel.WaveformStreamID(net, sta, loc, cha, ""),
                                                     parameter,
                                                     seiscomp.core.Time.FromString(
                                                         self._start, "%Y-%m-%d %H:%M:%S"),
                                                     seiscomp.core.Time.FromString(self._end, "%Y-%m-%d %H:%M:%S"))

                while it.get():
                    try:
                        wfq = seiscomp.datamodel.WaveformQuality.Cast(it.get())
                        qc.add(wfq)
                    except Exception:
                        pass
                    it.step()

        xarc.writeObject(qc)
        xarc.close()
        return True
+
+
# Instantiate the application and exit with its return code.
app = WfqQuery(len(sys.argv), sys.argv)
sys.exit(app())
diff --git a/bin/screloc b/bin/screloc
new file mode 100755
index 0000000..cdef1e0
Binary files /dev/null and b/bin/screloc differ
diff --git a/bin/scrttv b/bin/scrttv
new file mode 100755
index 0000000..e445b41
Binary files /dev/null and b/bin/scrttv differ
diff --git a/bin/scsendjournal b/bin/scsendjournal
new file mode 100755
index 0000000..61b5a00
--- /dev/null
+++ b/bin/scsendjournal
@@ -0,0 +1,83 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import sys
+import seiscomp.core
+import seiscomp.client
+import seiscomp.datamodel
+
+
class SendJournal(seiscomp.client.Application):
    """Send a journal entry (objectID, action[, parameters]) to the EVENT
    messaging group to manipulate event parameters."""

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)
        # Messaging-only tool targeting the EVENT group.
        self.setDatabaseEnabled(False, False)
        self.setMessagingEnabled(True)
        self.setMessagingUsername("")
        self.setPrimaryMessagingGroup("EVENT")

    def init(self):
        """Collect positional arguments: {objectID} {action} [parameters]."""
        if not seiscomp.client.Application.init(self):
            return False
        # Everything the option parser did not recognize is treated as the
        # positional parameter list.
        self.params = self.commandline().unrecognizedOptions()
        if len(self.params) < 2:
            sys.stderr.write(
                self.name() + " [opts] {objectID} {action} [parameters]\n")
            return False
        return True

    def printUsage(self):
        """Print tool usage, inherited options and an example invocation."""

        print('''Usage:
 scsendjournal [options]

Send journaling information to the messaging to manipulate event parameters''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Set the type of the event with ID gempa2021abcd to 'earthquake'
 scsendjournal -H localhost gempa2021abcd EvType "earthquake"
''')

    def run(self):
        """Wrap the journal entry in a notifier message and send it."""
        msg = seiscomp.datamodel.NotifierMessage()

        entry = seiscomp.datamodel.JournalEntry()
        entry.setCreated(seiscomp.core.Time.GMT())
        entry.setObjectID(self.params[0])
        entry.setSender(self.author())
        entry.setAction(self.params[1])

        sys.stderr.write(
            "Sending entry (" + entry.objectID() + "," + entry.action() + ")\n")

        # Optional third positional argument carries the action parameters.
        if len(self.params) > 2:
            entry.setParameters(self.params[2])

        n = seiscomp.datamodel.Notifier(
            seiscomp.datamodel.Journaling.ClassName(), seiscomp.datamodel.OP_ADD, entry)
        msg.attach(n)
        self.connection().send(msg)

        return True
+
+
def main(argc, argv):
    """Run the SendJournal application and return its exit code."""
    return SendJournal(argc, argv)()


if __name__ == "__main__":
    sys.exit(main(len(sys.argv), sys.argv))
diff --git a/bin/scsendorigin b/bin/scsendorigin
new file mode 100755
index 0000000..ee92953
--- /dev/null
+++ b/bin/scsendorigin
@@ -0,0 +1,94 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import sys
+import seiscomp.core
+import seiscomp.datamodel
+import seiscomp.client
+import seiscomp.logging
+
+
class SendOrigin(seiscomp.client.Application):
    """Create an artificial origin from --coord/--time and send it to the
    GUI messaging group."""

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)
        # Messaging-only tool targeting the GUI group.
        self.setDatabaseEnabled(False, False)
        self.setMessagingEnabled(True)
        self.setPrimaryMessagingGroup("GUI")

    def init(self):
        """Build self.origin from the --coord and --time options."""
        if not seiscomp.client.Application.init(self):
            return False

        try:
            cstr = self.commandline().optionString("coord")
            tstr = self.commandline().optionString("time")
        except:
            sys.stderr.write(
                "Must specify origin using '--coord lat,lon,dep --time time'\n")
            return False

        self.origin = seiscomp.datamodel.Origin.Create()

        ci = seiscomp.datamodel.CreationInfo()
        ci.setAgencyID(self.agencyID())
        ci.setCreationTime(seiscomp.core.Time.GMT())
        self.origin.setCreationInfo(ci)

        # --coord is "lat,lon,depth"
        lat, lon, dep = list(map(float, cstr.split(",")))
        self.origin.setLongitude(seiscomp.datamodel.RealQuantity(lon))
        self.origin.setLatitude(seiscomp.datamodel.RealQuantity(lat))
        self.origin.setDepth(seiscomp.datamodel.RealQuantity(dep))

        time = seiscomp.core.Time()
        # "/" is accepted as date separator; ":0:0" is appended before
        # parsing with "%F %T" -- presumably to complete "date hour" style
        # input to a full time; NOTE(review): confirm the expected --time
        # format, a full "HH:MM:SS" input would gain a spurious suffix.
        time.fromString(tstr.replace("/", "-") + ":0:0", "%F %T")
        self.origin.setTime(seiscomp.datamodel.TimeQuantity(time))

        return True

    def createCommandLineDescription(self):
        """Register the 'Parameters' option group (--coord, --time)."""
        try:
            self.commandline().addGroup("Parameters")
            self.commandline().addStringOption("Parameters",
                                               "coord",
                                               "Latitude,longitude,depth of origin")
            self.commandline().addStringOption("Parameters",
                                               "time", "time of origin")
        except:
            seiscomp.logging.warning("caught unexpected error %s" % sys.exc_info())

    def printUsage(self):
        """Print tool usage, inherited options and an example invocation."""
        print('''Usage:
 scsendorigin [options]

Create an artificial origin and send to the messaging''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Send an artificial origin with hypocenter parameters to the messaging
 scsendorigin --time "2022-05-01 10:00:00" --coord 52,12,10
''')

    def run(self):
        """Send the artificial origin message and terminate."""
        msg = seiscomp.datamodel.ArtificialOriginMessage(self.origin)
        self.connection().send(msg)
        return True
+
+
# Instantiate the application with a fixed messaging user name and exit
# with its return code.
app = SendOrigin(len(sys.argv), sys.argv)
# app.setName("scsendorigin")
app.setMessagingUsername("scsendorg")
sys.exit(app())
diff --git a/bin/scshowevent b/bin/scshowevent
new file mode 100755
index 0000000..6c10088
Binary files /dev/null and b/bin/scshowevent differ
diff --git a/bin/scsohlog b/bin/scsohlog
new file mode 100755
index 0000000..e90c3d2
--- /dev/null
+++ b/bin/scsohlog
@@ -0,0 +1,395 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import sys, os, re
+import seiscomp.core, seiscomp.client, seiscomp.logging, seiscomp.system
+
+
+"""
+Monitor application that connects to the messaging and collects all
+information on the STATUS_GROUP to create an XML file ever N seconds.
+It can furthermore call a configured script to trigger processing of the
+produced XML file.
+"""
+
# Regular expressions matching the "in(...)" / "out(...)" object-log entries
# of a status message. The named group 'params' captures the comma-separated
# parameter list; the group name was lost in the original pattern, which
# breaks the v.group('params') accesses in Client.updateObjects, so it is
# restored here. Raw strings avoid the invalid '\(' escape warning.
inputRegEx = re.compile(r"in\((?P<params>[^\)]*)\)")
outputRegEx = re.compile(r"out\((?P<params>[^\)]*)\)")


# Define all units of measure for available system SOH tags. Tags that are
# not given here are not processed.
Tests = {
    "cpuusage": "%",
    "clientmemoryusage": "kB",
    "sentmessages": "cnt",
    "receivedmessages": "cnt",
    "messagequeuesize": "cnt",
    "objectcount": "cnt",
    "uptime": "s",
    "dbadds": "row/s",
    "dbupdates": "row/s",
    "dbdeletes": "row/s"
}
+
+
#----------------------------------------------------------------------------
# Class TestLog to hold the properties of a test. It also creates XML.
#----------------------------------------------------------------------------
class TestLog:
    def __init__(self):
        self.value = None   # test value as received (string)
        self.uom = None     # unit of measure, taken from the Tests table
        self.update = None  # time stamp of the last update

    def toXML(self, f, name):
        """Write this test as a single <test .../> element to stream f.

        NOTE(review): the markup was reconstructed from the surviving
        fragments ("f.write('= 1E-6:") and is consistent with the sibling
        toXML methods -- confirm against the consumer of the XML file.
        """
        f.write('<test name="%s"' % name)
        if self.value:
            try:
                # Integral values are printed without decimals, everything
                # else with full float formatting.
                fvalue = float(self.value)
                if fvalue % 1.0 >= 1E-6:
                    f.write(' value="%f"' % fvalue)
                else:
                    f.write(' value="%d"' % int(fvalue))
            except ValueError:
                # Not numeric: emit verbatim.
                f.write(' value="%s"' % self.value)
        if self.uom:
            f.write(' uom="%s"' % self.uom)
        if self.update:
            f.write(' updateTime="%s"' % self.update)
        f.write('/>')
+
+
#----------------------------------------------------------------------------
# Class ObjectLog to hold the properties of a object log. It also creates
# XML.
#----------------------------------------------------------------------------
class ObjectLog:
    def __init__(self):
        self.count = None       # "cnt" parameter of the object log
        self.average = None     # "avg" parameter
        self.timeWindow = None  # "tw" parameter
        self.last = None        # "last" parameter (time of last object)
        self.update = None      # time stamp of the last update

    def toXML(self, f, name, channel):
        """Write this log as a single <object .../> element to stream f.

        NOTE(review): the element body was destroyed in the original
        (only "f.write('')" survived); the attributes below mirror the
        fields filled in Client.updateObjects and upstream scsohlog --
        confirm against the consumer of the XML file.
        """
        f.write('<object name="%s"' % name)
        if channel:
            f.write(' channel="%s"' % channel)
        if self.count:
            f.write(' count="%s"' % self.count)
        if self.timeWindow:
            f.write(' timeWindow="%s"' % self.timeWindow)
        if self.average:
            f.write(' average="%s"' % self.average)
        if self.last:
            f.write(' lastTime="%s"' % self.last)
        if self.update:
            f.write(' updateTime="%s"' % self.update)
        f.write('/>')
+
+
#----------------------------------------------------------------------------
# Class Client that holds all tests and object logs of a particular client
# (messaging user name).
#----------------------------------------------------------------------------
class Client:
    def __init__(self):
        self.pid = None       # process id reported by the client
        self.progname = None  # program name reported by the client
        self.host = None      # host the client runs on

        self.inputLogs = dict()   # (name, channel) -> ObjectLog
        self.outputLogs = dict()  # (name, channel) -> ObjectLog
        self.tests = dict()       # test name -> TestLog

    #----------------------------------------------------------------------------
    # Update/add (system) tests based on the passed tests dictionary retrieved
    # from a status message.
    #----------------------------------------------------------------------------
    def updateTests(self, updateTime, tests):
        for name, value in tests.items():
            # A few tags describe the client itself rather than a test.
            if name == "pid":
                self.pid = value
            elif name == "programname":
                self.progname = value
            elif name == "hostname":
                self.host = value

            # Tags without a configured unit of measure are not processed.
            if name not in Tests:
                continue

            # Convert d:h:m:s to seconds
            if name == "uptime":
                try:
                    t = [int(v) for v in value.split(":")]
                except ValueError:
                    continue
                if len(t) != 4:
                    continue
                value = str(t[0]*86400 + t[1]*3600 + t[2]*60 + t[3])

            if name not in self.tests:
                log = TestLog()
                log.uom = Tests[name]
                self.tests[name] = log
            else:
                log = self.tests[name]
            log.value = value
            log.update = updateTime

    #----------------------------------------------------------------------------
    # Update/add object logs based on the passed log text. The content is parsed.
    #----------------------------------------------------------------------------
    def updateObjects(self, updateTime, log):
        # Check input structure
        v = inputRegEx.search(log)
        if not v:
            # Check out structure
            v = outputRegEx.search(log)
            if not v:
                return
            logs = self.outputLogs
        else:
            logs = self.inputLogs

        try:
            tmp = v.group('params').split(',')
        except Exception:
            return

        # Parse the comma-separated "key:value" pairs; entries without a
        # colon are skipped.
        params = dict()
        for p in tmp:
            try:
                param, value = p.split(':', 1)
            except ValueError:
                continue
            params[param] = value

        name = params.get("name", "")
        channel = params.get("chan", "")
        if (name, channel) not in logs:
            logObj = ObjectLog()
            logs[(name, channel)] = logObj
        else:
            logObj = logs[(name, channel)]

        logObj.update = updateTime
        logObj.count = params.get("cnt")
        logObj.average = params.get("avg")
        logObj.timeWindow = params.get("tw")
        logObj.last = params.get("last")

    def toXML(self, f, name):
        """Write this client as a <service> element wrapping its tests and
        input/output object logs.

        NOTE(review): the element names were destroyed in the original
        (only empty f.write('') calls survived); the layout below was
        reconstructed from upstream scsohlog -- confirm against the
        consumer of the XML file.
        """
        f.write('<service name="%s"' % name)
        if self.host:
            f.write(' host="%s"' % self.host)
        if self.pid:
            f.write(' pid="%s"' % self.pid)
        if self.progname:
            f.write(' prog="%s"' % self.progname)
        f.write('>')
        for testName, log in self.tests.items():
            log.toXML(f, testName)
        if len(self.inputLogs) > 0:
            f.write('<input>')
            for key, log in self.inputLogs.items():
                log.toXML(f, key[0], key[1])
            f.write('</input>')
        if len(self.outputLogs) > 0:
            f.write('<output>')
            for key, log in self.outputLogs.items():
                log.toXML(f, key[0], key[1])
            f.write('</output>')
        f.write('</service>')
+
+
#----------------------------------------------------------------------------
# SC3 application class Monitor
#----------------------------------------------------------------------------
class Monitor(seiscomp.client.Application):
    """Collects state-of-health messages from the STATUS_GROUP and writes an
    XML snapshot of all connected clients every N seconds; optionally runs a
    post-processing script on the generated file."""

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)
        self.setDatabaseEnabled(False, False)
        self.setMembershipMessagesEnabled(True)
        self.addMessagingSubscription(seiscomp.client.Protocol.STATUS_GROUP)
        self.setMessagingUsername("")
        self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)
        self._clients = dict()     # messaging user name -> Client
        self._outputScript = None  # script run after each snapshot, if set
        self._outputFile = "@LOGDIR@/server.xml"
        self._outputInterval = 60  # snapshot interval in seconds

    def createCommandLineDescription(self):
        """Register the Output option group."""
        try:
            self.commandline().addGroup("Output")
            self.commandline().addStringOption(
                "Output", "file,o",
                "Specify the output file to create")
            self.commandline().addIntOption(
                "Output", "interval,i",
                "Specify the output interval in seconds (default: 60)")
            self.commandline().addStringOption(
                "Output", "script",
                "Specify an output script to be called after the output file is generated")
        except Exception:
            seiscomp.logging.warning(
                "caught unexpected error %s" % sys.exc_info())
        return True

    def initConfiguration(self):
        """Read monitor.output.* settings; missing values keep the defaults."""
        if not seiscomp.client.Application.initConfiguration(self):
            return False

        try:
            self._outputFile = self.configGetString("monitor.output.file")
        except Exception:
            pass

        try:
            self._outputInterval = self.configGetInt("monitor.output.interval")
        except Exception:
            pass

        try:
            self._outputScript = self.configGetString("monitor.output.script")
        except Exception:
            pass

        return True

    def init(self):
        """Apply command line overrides, resolve paths and start the timer."""
        if not seiscomp.client.Application.init(self):
            return False

        # Command line options take precedence over the configuration.
        try:
            self._outputFile = self.commandline().optionString("file")
        except Exception:
            pass

        try:
            self._outputInterval = self.commandline().optionInt("interval")
        except Exception:
            pass

        try:
            self._outputScript = self.commandline().optionString("script")
        except Exception:
            pass

        self._outputFile = seiscomp.system.Environment.Instance().absolutePath(
            self._outputFile)
        seiscomp.logging.info("Output file: %s" % self._outputFile)

        if self._outputScript:
            self._outputScript = seiscomp.system.Environment.Instance().absolutePath(
                self._outputScript)
            seiscomp.logging.info("Output script: %s" % self._outputScript)

        self._monitor = self.addInputObjectLog(
            "status", seiscomp.client.Protocol.STATUS_GROUP)
        self.enableTimer(self._outputInterval)
        seiscomp.logging.info(
            "Starting output timer with %d secs" % self._outputInterval)

        return True

    def printUsage(self):
        """Print the usage banner, inherited option help and an example."""
        print('''Usage:
  scsohlog [options]

Connect to the messaging collecting information sent from connected clients''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Create an output XML file every 60 seconds and execute a custom script to process the XML file
  scsohlog -o stat.xml -i 60 --script process-stat.sh
''')

    def handleNetworkMessage(self, msg):
        """Dispatch raw network packets: status updates and disconnects."""
        # A state of health message
        if msg.type == seiscomp.client.Packet.Status:
            data = filter(None, msg.payload.split("&"))
            self.updateStatus(msg.subject, data)

        # If a client disconnected, remove it from the list
        elif msg.type == seiscomp.client.Packet.Disconnected:
            if msg.subject in self._clients:
                del self._clients[msg.subject]

    def handleDisconnect(self):
        # If we got disconnected all client states are deleted
        self._clients = dict()

    #----------------------------------------------------------------------------
    # Timeout handler called by the Application class.
    # Write XML to configured output file and trigger configured script.
    #----------------------------------------------------------------------------
    def handleTimeout(self):
        # "-" means: dump the snapshot to stdout instead of a file.
        if self._outputFile == "-":
            self.toXML(sys.stdout)
            sys.stdout.write("\n")
            return

        try:
            f = open(self._outputFile, "w")
        except OSError:
            seiscomp.logging.error(
                "Unable to create output file: %s" % self._outputFile)
            return

        self.toXML(f)
        f.close()

        if self._outputScript:
            os.system(self._outputScript + " " + self._outputFile)

    #----------------------------------------------------------------------------
    # Write XML to stream f
    #----------------------------------------------------------------------------
    def toXML(self, f):
        # NOTE(review): the markup below was reconstructed -- the original
        # element names were lost to extraction; only the '%s' substitution
        # of messagingURL() survived. Confirm against the consumer of the
        # generated file.
        f.write('<?xml version="1.0" encoding="UTF-8"?>')
        f.write('<server name="%s">' % self.messagingURL())
        for name, client in self._clients.items():
            client.toXML(f, name)
        f.write('</server>')

    def updateStatus(self, name, items):
        """Feed one status message (iterable of 'key=value' parameters and
        object-log entries) into the Client record for *name*."""
        if name not in self._clients:
            self._clients[name] = Client()

        now = seiscomp.core.Time.GMT()
        client = self._clients[name]
        self.logObject(self._monitor, now)

        params = dict()
        objs = []

        for t in items:
            # Entries without '=' are object logs; everything else is a
            # simple test parameter.
            try:
                param, value = t.split("=", 1)
                params[param] = value
            except ValueError:
                objs.append(t)

        # The sender's own time stamp wins over our receive time.
        if "time" in params:
            update = params["time"]
            del params["time"]
        else:
            update = now.iso()

        client.updateTests(update, params)
        for o in objs:
            client.updateObjects(update, o)
+
+
# Create and run the monitor; the return value of app() is the process
# exit status.
app = Monitor(len(sys.argv), sys.argv)
sys.exit(app())
diff --git a/bin/scvoice b/bin/scvoice
new file mode 100755
index 0000000..491a686
--- /dev/null
+++ b/bin/scvoice
@@ -0,0 +1,502 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+############################################################################
+
+import sys
+import subprocess
+import traceback
+
+from seiscomp import (client, core, datamodel, logging, seismology, system,
+ math)
+
class VoiceAlert(client.Application):
    """Messaging client that alerts the user in real time by invoking
    external scripts when amplitudes, preliminary origins or events
    arrive on the messaging bus."""

    def __init__(self, argc, argv):
        client.Application.__init__(self, argc, argv)

        self.setMessagingEnabled(True)
        self.setDatabaseEnabled(True, True)
        self.setLoadRegionsEnabled(True)
        self.setMessagingUsername("")
        self.setPrimaryMessagingGroup(client.Protocol.LISTENER_GROUP)
        self.addMessagingSubscription("EVENT")
        self.addMessagingSubscription("LOCATION")
        self.addMessagingSubscription("MAGNITUDE")

        self.setAutoApplyNotifierEnabled(True)
        self.setInterpretNotifierEnabled(True)

        self.setLoadCitiesEnabled(True)
        self.setLoadRegionsEnabled(True)

        # Defaults; may be overridden by configuration / command line in init()
        self._ampType = "snr"
        self._citiesMaxDist = 20
        self._citiesMinPopulation = 50000

        self._cache = None
        self._eventDescriptionPattern = None
        self._ampScript = None
        self._alertScript = None
        self._eventScript = None

        # Popen handles of the currently running scripts (None = not running)
        self._ampProc = None
        self._alertProc = None
        self._eventProc = None

        self._newWhenFirstSeen = False
        self._prevMessage = {}   # eventID -> last message sent (dedup)
        self._agencyIDs = []     # empty list means: accept all agencies

    def createCommandLineDescription(self):
        """Register the scvoice specific command line option groups."""
        self.commandline().addOption(
            "Generic", "first-new", "calls an event a new event when it is "
            "seen the first time")
        self.commandline().addGroup("Alert")
        self.commandline().addStringOption(
            "Alert", "amp-type", "specify the amplitude type to listen to",
            self._ampType)
        self.commandline().addStringOption(
            "Alert", "amp-script", "specify the script to be called when a "
            "stationamplitude arrived, network-, stationcode and amplitude are "
            "passed as parameters $1, $2 and $3")
        self.commandline().addStringOption(
            "Alert", "alert-script", "specify the script to be called when a "
            "preliminary origin arrived, latitude and longitude are passed as "
            "parameters $1 and $2")
        self.commandline().addStringOption(
            "Alert", "event-script", "specify the script to be called when an "
            "event has been declared; the message string, a flag (1=new event, "
            "0=update event), the EventID, the arrival count and the magnitude "
            "(optional when set) are passed as parameter $1, $2, $3, $4 and $5")
        self.commandline().addGroup("Cities")
        self.commandline().addStringOption(
            "Cities", "max-dist", "maximum distance for using the distance "
            "from a city to the earthquake")
        self.commandline().addStringOption(
            "Cities", "min-population", "minimum population for a city to "
            "become a point of interest")
        self.commandline().addGroup("Debug")
        self.commandline().addStringOption(
            "Debug", "eventid,E", "specify Event ID")
        return True

    def init(self):
        """Read configuration and command line (command line wins), resolve
        script paths, create the object cache and subscribe to AMPLITUDE
        when an amplitude script is configured."""
        if not client.Application.init(self):
            return False

        try:
            self._newWhenFirstSeen = self.configGetBool("firstNew")
        except BaseException:
            pass

        try:
            agencyIDs = self.configGetStrings("agencyIDs")
            for item in agencyIDs:
                item = item.strip()
                if item not in self._agencyIDs:
                    self._agencyIDs.append(item)
        except BaseException:
            pass

        try:
            if self.commandline().hasOption("first-new"):
                self._newWhenFirstSeen = True
        except BaseException:
            pass

        try:
            self._eventDescriptionPattern = self.configGetString("poi.message")
        except BaseException:
            pass

        try:
            self._citiesMaxDist = self.configGetDouble("poi.maxDist")
        except BaseException:
            pass

        # Command line overrides the configured value.
        try:
            self._citiesMaxDist = self.commandline().optionDouble("max-dist")
        except BaseException:
            pass

        try:
            self._citiesMinPopulation = self.configGetInt("poi.minPopulation")
        except BaseException:
            pass

        try:
            self._citiesMinPopulation = self.commandline().optionInt("min-population")
        except BaseException:
            pass

        try:
            self._ampType = self.commandline().optionString("amp-type")
        except BaseException:
            pass

        # Script lookup order: command line first, then configuration.
        try:
            self._ampScript = self.commandline().optionString("amp-script")
        except BaseException:
            try:
                self._ampScript = self.configGetString("scripts.amplitude")
            except BaseException:
                logging.warning("No amplitude script defined")

        if self._ampScript:
            self._ampScript = system.Environment.Instance().absolutePath(self._ampScript)

        try:
            self._alertScript = self.commandline().optionString("alert-script")
        except BaseException:
            try:
                self._alertScript = self.configGetString("scripts.alert")
            except BaseException:
                logging.warning("No alert script defined")

        if self._alertScript:
            self._alertScript = system.Environment.Instance(
            ).absolutePath(self._alertScript)

        try:
            self._eventScript = self.commandline().optionString("event-script")
        except BaseException:
            try:
                self._eventScript = self.configGetString("scripts.event")
                logging.info(
                    "Using event script: %s" % self._eventScript)
            except BaseException:
                logging.warning("No event script defined")

        if self._eventScript:
            self._eventScript = system.Environment.Instance() \
                .absolutePath(self._eventScript)

        logging.info("Creating ringbuffer for 100 objects")
        if not self.query():
            logging.warning(
                "No valid database interface to read from")
        self._cache = datamodel.PublicObjectRingBuffer(
            self.query(), 100)

        if self._ampScript and self.connection():
            self.connection().subscribe("AMPLITUDE")

        if self._newWhenFirstSeen:
            logging.info(
                "A new event is declared when I see it the first time")

        if not self._agencyIDs:
            logging.info("agencyIDs: []")
        else:
            logging.info(
                "agencyIDs: %s" % (" ".join(self._agencyIDs)))

        return True

    def printUsage(self):
        """Print the usage banner, inherited option help and an example."""

        print('''Usage:
  scvoice [options]

Alert the user acoustically in real time.
''')

        client.Application.printUsage(self)

        print('''Examples:
Execute scvoice on command line with debug output
  scvoice --debug
''')

    def run(self):
        """Optionally replay one event (--eventid) and enter the message loop."""
        try:
            # Replay mode: notify about the given event before processing
            # live messages.
            try:
                eventID = self.commandline().optionString("eventid")
                event = self._cache.get(datamodel.Event, eventID)
                if event:
                    self.notifyEvent(event)
            except BaseException:
                pass

            return client.Application.run(self)
        except BaseException:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            return False

    def runAmpScript(self, net, sta, amp):
        """Start the amplitude script with (network, station, amplitude);
        skipped when a previous invocation is still running."""
        if not self._ampScript:
            return

        if self._ampProc is not None:
            if self._ampProc.poll() is None:
                logging.warning(
                    "AmplitudeScript still in progress -> skipping message")
                return
        try:
            self._ampProc = subprocess.Popen(
                [self._ampScript, net, sta, "%.2f" % amp])
            logging.info(
                "Started amplitude script with pid %d" % self._ampProc.pid)
        except BaseException:
            logging.error(
                "Failed to start amplitude script '%s'" % self._ampScript)

    def runAlert(self, lat, lon):
        """Start the alert script with (latitude, longitude); skipped when a
        previous invocation is still running."""
        if not self._alertScript:
            return

        if self._alertProc is not None:
            if self._alertProc.poll() is None:
                logging.warning(
                    "AlertScript still in progress -> skipping message")
                return
        try:
            self._alertProc = subprocess.Popen(
                [self._alertScript, "%.1f" % lat, "%.1f" % lon])
            logging.info(
                "Started alert script with pid %d" % self._alertProc.pid)
        except BaseException:
            logging.error(
                "Failed to start alert script '%s'" % self._alertScript)

    def handleMessage(self, msg):
        """Intercept data messages to catch preliminary origins, then let
        the base class apply/interpret notifiers as usual."""
        try:
            dm = core.DataMessage.Cast(msg)
            if dm:
                for att in dm:
                    org = datamodel.Origin.Cast(att)
                    if not org:
                        continue

                    # evaluationStatus() raises when unset -- ignore then.
                    try:
                        if org.evaluationStatus() == datamodel.PRELIMINARY:
                            self.runAlert(org.latitude().value(),
                                          org.longitude().value())
                    except BaseException:
                        pass

            #ao = datamodel.ArtificialOriginMessage.Cast(msg)
            # if ao:
            #    org = ao.origin()
            #    if org:
            #        self.runAlert(org.latitude().value(), org.longitude().value())
            #    return

            client.Application.handleMessage(self, msg)
        except BaseException:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)

    def addObject(self, parentID, arg0):
        """Notifier callback for newly created objects: dispatches
        amplitudes, origins, magnitudes and events."""
        #pylint: disable=W0622
        try:
            obj = datamodel.Amplitude.Cast(arg0)
            if obj:
                if obj.type() == self._ampType:
                    logging.debug("got new %s amplitude '%s'" % (
                        self._ampType, obj.publicID()))
                    self.notifyAmplitude(obj)

            obj = datamodel.Origin.Cast(arg0)
            if obj:
                self._cache.feed(obj)
                logging.debug("got new origin '%s'" % obj.publicID())

                try:
                    if obj.evaluationStatus() == datamodel.PRELIMINARY:
                        self.runAlert(obj.latitude().value(),
                                      obj.longitude().value())
                except BaseException:
                    pass

                return

            obj = datamodel.Magnitude.Cast(arg0)
            if obj:
                self._cache.feed(obj)
                logging.debug(
                    "got new magnitude '%s'" % obj.publicID())
                return

            obj = datamodel.Event.Cast(arg0)
            if obj:
                org = self._cache.get(
                    datamodel.Origin, obj.preferredOriginID())
                agencyID = org.creationInfo().agencyID()
                logging.debug("got new event '%s'" % obj.publicID())
                if not self._agencyIDs or agencyID in self._agencyIDs:
                    self.notifyEvent(obj, True)
        except BaseException:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)

    def updateObject(self, parentID, arg0):
        """Notifier callback for updated objects: only events are relevant."""
        try:
            obj = datamodel.Event.Cast(arg0)
            if obj:
                org = self._cache.get(datamodel.Origin, obj.preferredOriginID())
                agencyID = org.creationInfo().agencyID()
                logging.debug("update event '%s'" % obj.publicID())
                if not self._agencyIDs or agencyID in self._agencyIDs:
                    self.notifyEvent(obj, False)
        except BaseException:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)

    def notifyAmplitude(self, amp):
        """Forward an amplitude to the amplitude script."""
        self.runAmpScript(amp.waveformID().networkCode(),
                          amp.waveformID().stationCode(),
                          amp.amplitude().value())

    def notifyEvent(self, evt, newEvent=True):
        """Compose the spoken event message (region, magnitude, depth, age)
        and pass it to the event script, suppressing exact repeats."""
        try:
            org = self._cache.get(datamodel.Origin, evt.preferredOriginID())
            if not org:
                logging.warning("unable to get origin %s, ignoring event "
                                "message" % evt.preferredOriginID())
                return

            preliminary = False
            try:
                if org.evaluationStatus() == datamodel.PRELIMINARY:
                    preliminary = True
            except BaseException:
                pass

            # Non-preliminary events require a preferred magnitude.
            if not preliminary:
                nmag = self._cache.get(
                    datamodel.Magnitude, evt.preferredMagnitudeID())
                if nmag:
                    mag = nmag.magnitude().value()
                    mag = "magnitude %.1f" % mag
                else:
                    if len(evt.preferredMagnitudeID()) > 0:
                        logging.warning(
                            "unable to get magnitude %s, ignoring event "
                            "message" % evt.preferredMagnitudeID())
                    else:
                        logging.warning(
                            "no preferred magnitude yet, ignoring event message")
                    return

            # keep track of old events
            if self._newWhenFirstSeen:
                if evt.publicID() in self._prevMessage:
                    newEvent = False
                else:
                    newEvent = True

            dsc = seismology.Regions.getRegionName(
                org.latitude().value(), org.longitude().value())

            # Optionally enrich the description with the nearest point of
            # interest (city) when a message pattern is configured.
            if self._eventDescriptionPattern:
                try:
                    city, dist, _ = self.nearestCity(
                        org.latitude().value(), org.longitude().value(),
                        self._citiesMaxDist, self._citiesMinPopulation)
                    if city:
                        dsc = self._eventDescriptionPattern
                        region = seismology.Regions.getRegionName(
                            org.latitude().value(), org.longitude().value())
                        distStr = str(int(math.deg2km(dist)))
                        dsc = dsc.replace("@region@", region).replace(
                            "@dist@", distStr).replace("@poi@", city.name())
                except BaseException:
                    pass

            logging.debug("desc: %s" % dsc)

            dep = org.depth().value()
            now = core.Time.GMT()
            otm = org.time().value()

            dt = (now - otm).seconds()

            # if dt > dtmax:
            #    return

            # Render the event age in a human-friendly unit.
            if dt > 3600:
                dt = "%d hours %d minutes ago" % (dt/3600, (dt % 3600)/60)
            elif dt > 120:
                dt = "%d minutes ago" % (dt/60)
            else:
                dt = "%d seconds ago" % dt

            if preliminary:
                message = "earthquake, preliminary, %%s, %s" % dsc
            else:
                message = "earthquake, %%s, %s, %s, depth %d kilometers" % (
                    dsc, mag, int(dep+0.5))
            # at this point the message lacks the "ago" part

            if evt.publicID() in self._prevMessage and \
                    self._prevMessage[evt.publicID()] == message:
                logging.info("Suppressing repeated message '%s'" % message)
                return

            self._prevMessage[evt.publicID()] = message
            message = message % dt  # fill the "ago" part
            logging.info(message)

            if not self._eventScript:
                return

            if self._eventProc is not None:
                if self._eventProc.poll() is None:
                    logging.warning(
                        "EventScript still in progress -> skipping message")
                    return

            try:
                param2 = 0
                param3 = 0
                param4 = ""
                if newEvent:
                    param2 = 1

                org = self._cache.get(
                    datamodel.Origin, evt.preferredOriginID())
                if org:
                    try:
                        param3 = org.quality().associatedPhaseCount()
                    except BaseException:
                        pass

                nmag = self._cache.get(
                    datamodel.Magnitude, evt.preferredMagnitudeID())
                if nmag:
                    param4 = "%.1f" % nmag.magnitude().value()

                self._eventProc = subprocess.Popen(
                    [self._eventScript, message, "%d" % param2, evt.publicID(),
                     "%d" % param3, param4])
                logging.info(
                    "Started event script with pid %d" % self._eventProc.pid)
            except BaseException:
                logging.error(
                    "Failed to start event script '%s %s %d %d %s'" % (
                        self._eventScript, message, param2, param3, param4))
        except BaseException:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
+
+
+app = VoiceAlert(len(sys.argv), sys.argv)
+sys.exit(app())
diff --git a/bin/scwfas b/bin/scwfas
new file mode 100755
index 0000000..4ecd714
Binary files /dev/null and b/bin/scwfas differ
diff --git a/bin/scwfparam b/bin/scwfparam
new file mode 100755
index 0000000..a0ea77a
Binary files /dev/null and b/bin/scwfparam differ
diff --git a/bin/scxmldump b/bin/scxmldump
new file mode 100755
index 0000000..921fa45
Binary files /dev/null and b/bin/scxmldump differ
diff --git a/bin/scxmlmerge b/bin/scxmlmerge
new file mode 100755
index 0000000..ad1938a
Binary files /dev/null and b/bin/scxmlmerge differ
diff --git a/bin/sczip b/bin/sczip
new file mode 100755
index 0000000..97c009b
Binary files /dev/null and b/bin/sczip differ
diff --git a/bin/seiscomp b/bin/seiscomp
new file mode 100755
index 0000000..4e0fd7f
--- /dev/null
+++ b/bin/seiscomp
@@ -0,0 +1,55 @@
#!/bin/sh -e

# Resolve softlink to seiscomp executable first
if test -L "$0"
then
    # $0 is a link
    target="$(readlink "$0")"
    case "$target" in
        /*)
            d="$target"
            ;;
        *)
            d="$(dirname "$0")/$target"
            ;;
    esac
else
    # $0 is NOT a link
    case "$0" in
        */* | /*)
            d="$0"
            ;;
        *)
            d="$(command -v "$0")"
            ;;
    esac
fi

normalized_dirname() {
    # Normalize directory name without following symlinks.
    # Brute-force but portable.
    cd "${1%/*}" && pwd || exit 1
}

# Determine the root directory of the 'seiscomp' utility.
d="$(normalized_dirname "$d")"
SEISCOMP_ROOT="$(realpath "${d%/bin}")"

export SEISCOMP_ROOT
export PATH="$SEISCOMP_ROOT/bin:$PATH"
export LD_LIBRARY_PATH="$SEISCOMP_ROOT/lib:$LD_LIBRARY_PATH"
export PYTHONPATH="$SEISCOMP_ROOT/lib/python:$PYTHONPATH"
export MANPATH="$SEISCOMP_ROOT/share/man:$MANPATH"

# Source an optional per-host environment file. The path is quoted so the
# test and the dot command survive spaces in SEISCOMP_ROOT or the hostname
# (the original left it unquoted, which also risks glob expansion).
HOSTENV="$SEISCOMP_ROOT/etc/env/by-hostname/$(hostname)"
test -f "$HOSTENV" && . "$HOSTENV"

case $1 in
    exec)
        # Run an arbitrary command inside the prepared environment.
        shift
        exec "$@"
        ;;
    *)
        # Default: hand everything to the python control script (quoted
        # so an installation path containing spaces works).
        exec "$SEISCOMP_ROOT/bin/seiscomp-python" "$SEISCOMP_ROOT/bin/seiscomp-control.py" "$@"
        ;;
esac
diff --git a/bin/seiscomp-control.py b/bin/seiscomp-control.py
new file mode 100755
index 0000000..98eac0a
--- /dev/null
+++ b/bin/seiscomp-control.py
@@ -0,0 +1,1534 @@
+#!/usr/bin/env seiscomp-python
+
+from __future__ import division, print_function
+
+import glob
+import importlib
+import math
+import os
+import platform
+import shutil
+import signal
+import socket
+import subprocess
+import sys
+import traceback
+import seiscomp.shell
+
+# Problem: if
+# import seiscomp.config
+# fails, then in any case a sometimes misleading exception
+# ImportError: No module named _config
+# is raised, even if the seiscomp._config module exists but for
+# another reason fails to import. We therefore...
+import seiscomp._config
+# ...here explicitly to get a meaningful exception if this fails.
+
+import seiscomp.config
+import seiscomp.kernel
+
# Python version dependent string conversion: on Python 2 bytes are already
# str; on Python 3 decode as UTF-8, replacing undecodable bytes.
if sys.version_info[0] < 3:
    py3ustr = str
else:
    py3ustr = lambda s: s.decode('utf-8', 'replace')

# Portable interactive prompt: Python 2's raw_input vs. Python 3's input.
try:
    real_raw_input = raw_input
except NameError:
    real_raw_input = input
+
+
def getInput(question, default=None, options=None):
    """Request (and optionally enforce) interactive user input.

    question -- the question to be answered.
    default  -- value returned when the user just presses enter.
    options  -- list/string of accepted answers (case-insensitive); when
                given, the question is repeated until the input matches
                one of them (or the default is taken).
    """

    def _fallback(prompt_text):
        # Echo the default onto the just-finished prompt line when the
        # user made no input, then return it.
        if default:
            print("\033[F\033[{}G{}".format(len(prompt_text)+1, default))
        return default

    if not options:
        # Free-form input: accept anything, fall back to the default.
        suffix = " [{}]".format(default) if default is not None else ""
        prompt = "{}{}: ".format(question, suffix)
        return real_raw_input(prompt) or _fallback(prompt)

    if default is not None:
        default = str(default).lower()

    # Restricted input: normalize the options and loop until one matches.
    normalized = [str(choice).lower() for choice in options]
    shown = "/".join(c.upper() if c == default else c for c in normalized)
    prompt = "{} [{}]: ".format(question, shown)
    while True:
        answer = real_raw_input(prompt)
        if not answer and default:
            return _fallback(prompt)

        if answer.lower() in normalized:
            return answer.lower()
+
+
# Platform-specific names of the dynamic-linker environment variables:
# macOS uses the DYLD_* pair, everything else LD_LIBRARY_PATH (and has no
# framework path concept).
if sys.platform == "darwin":
    SysLibraryPathVar = "DYLD_FALLBACK_LIBRARY_PATH"
    SysFrameworkPathVar = "DYLD_FALLBACK_FRAMEWORK_PATH"
else:
    SysLibraryPathVar = "LD_LIBRARY_PATH"
    SysFrameworkPathVar = None
+
+
def get_library_path():
    # Return the dynamic library search path for the current platform.
    # NOTE(review): LD_LIBRARY_PATH and DYLD_FALLBACK_FRAMEWORK_PATH are
    # module-level globals presumably assigned elsewhere in this file --
    # confirm they are set before the first call.
    if sys.platform == "darwin":
        return LD_LIBRARY_PATH + ":" + DYLD_FALLBACK_FRAMEWORK_PATH

    return LD_LIBRARY_PATH
+
+
def get_framework_path():
    # Return the macOS framework search path (global assigned elsewhere --
    # presumably only meaningful on darwin; confirm).
    return DYLD_FALLBACK_FRAMEWORK_PATH
+
+
def is_string(variable):
    """Return True if *variable* is a text string.

    Works on both Python 2 (basestring) and Python 3 (str).
    """
    try:
        text_types = basestring
    except NameError:
        text_types = str
    return isinstance(variable, text_types)
+
+
+# ------------------------------------------------------------------------------
+# Helper functions
+# ------------------------------------------------------------------------------
# Guard so the process group is signalled at most once, however often the
# handler fires.
SIGTERM_SENT = False


def sigterm_handler(_signum, _):
    """SIGTERM handler: forward the signal once to the whole process group
    (killpg(0, ...) targets our own group), then exit."""
    # pylint: disable=W0603
    global SIGTERM_SENT
    if not SIGTERM_SENT:
        SIGTERM_SENT = True
        os.killpg(0, signal.SIGTERM)

    sys.exit()
+
+
def system(args):
    """Run *args* as a child process and return its exit status.

    The wait is retried on KeyboardInterrupt (the child handles Ctrl-C
    itself); any other exception terminates the child, is reported on
    stderr, and the wait continues.
    """
    child = subprocess.Popen(args, shell=False, env=os.environ)
    while True:
        try:
            return child.wait()
        except KeyboardInterrupt:
            continue
        except Exception as exc:
            try:
                child.terminate()
            except Exception:
                pass
            sys.stderr.write("Exception: %s\n" % str(exc))
            continue
+
+
def error(msg):
    """Print an 'error:' line to stderr and flush it immediately."""
    print("error: %s" % msg, file=sys.stderr)
    sys.stderr.flush()
+
+
def warning(msg):
    """Print a 'warning:' line to stderr and flush it immediately."""
    print("warning: %s" % msg, file=sys.stderr)
    sys.stderr.flush()
+
+
# Returns the seiscomp.kernel.Module class defined in the python file at
# *path*. Imported modules are cached in sys.modules under a mangled name
# so each file is imported only once.
def load_module(path):
    base = os.path.splitext(os.path.basename(path))[0].replace('.', '_')
    cache_key = '__seiscomp_modules_' + base

    cached = sys.modules.get(cache_key)
    if cached is None:
        # Make sure the init directory is the first search path entry so
        # the plain module name resolves to the right file.
        if sys.path[0] != INIT_PATH:
            sys.path.insert(0, INIT_PATH)
        cached = importlib.import_module(base)
        cached.__file__ = path

        # store it in sys.modules
        sys.modules[cache_key] = cached

    return cached.Module
+
+
def module_key(module):
    """Sort key for modules: numeric order first, then name."""
    return module.order, module.name
+
+
def load_init_modules(path):
    """Instantiate every module found in *path* (one per *.py file).

    Files that fail to import or instantiate are reported via error() and
    skipped. The result is sorted by (order, name).
    """
    modules = []

    if not os.path.exists(path):
        error("Cannot load any module - path not existing: %s" % path)
        return modules

    for candidate in glob.glob(os.path.join(path, "*.py")):
        try:
            module_cls = load_module(candidate)
        except Exception as exc:
            error(("%s: " % candidate) + str(exc))
            continue

        try:
            instance = module_cls(env)
        except Exception as exc:
            error(("%s: " % candidate) + str(exc))
            continue

        modules.append(instance)

    return sorted(modules, key=module_key)
+
+
def get_module(name):
    """Return the loaded module called *name*, or None if unknown."""
    return next((m for m in mods if m.name == name), None)
+
+
def has_module(name):
    # True if a module called *name* has been loaded.
    return get_module(name) is not None
+
+
def dump_paths():
    """Print the effective environment (root, search paths, cwd) for
    debugging installation problems."""
    print('--------------------')
    print('SEISCOMP_ROOT="%s"' % SEISCOMP_ROOT)
    print('PATH="%s"' % os.environ["PATH"])
    print('%s="%s"' % (SysLibraryPathVar, os.environ[SysLibraryPathVar]))
    if SysFrameworkPathVar:
        # Only macOS defines a framework path variable.
        print('%s="%s"' % (SysFrameworkPathVar, os.environ[SysFrameworkPathVar]))
    print('PYTHONPATH="%s"' % sys.path)
    print('CWD="%s"' % os.getcwd())
    print('--------------------')
+
+
# Returns whether a module should run or not. It simply returns if its
# runfile exists (the runfile marks the module as started).
def shouldModuleRun(mod_name):
    return os.path.exists(env.runFile(mod_name))
+
+
def touch(filename):
    """Create *filename* as an empty file (or truncate it to zero length).

    Failures are reported via error() instead of raising, matching the
    best-effort style of the surrounding helpers.
    """
    try:
        # 'with' guarantees the descriptor is released even on failure;
        # the previous open(...).close() could leak on an exception.
        with open(filename, 'w'):
            pass
    except Exception as exc:
        error(str(exc))
+
+
def start_module(mod):
    """Mark *mod* as expected-to-run and start it; return mod.start()'s result."""
    # The runfile records that this module should be running (see
    # shouldModuleRun) and is removed again by stop_module.
    touch(env.runFile(mod.name))
    return mod.start()
+
+
def stop_module(mod):
    """Stop *mod* and remove its runfile. Return 0 on success, 1 on error."""
    try:
        stopped = mod.stop()
    except Exception as e:
        error("Failed to stop %s: %s" % (mod.name, str(e)))
        return 1

    if not stopped:
        error("Failed to stop %s: unknown error" % mod.name)
        return 1

    # Remove the runfile so 'seiscomp check' does not restart the module.
    try:
        os.remove(env.runFile(mod.name))
    except BaseException:
        return 1

    return 0
+
+
def start_kernel_modules():
    """Start the first registered kernel (core) module.

    Returns its start_module() result, or 1 if no core module exists.
    NOTE(review): only the first CoreModule found is started — confirm a
    single kernel module is expected here.
    """
    core = next(
        (m for m in mods if isinstance(m, seiscomp.kernel.CoreModule)), None)
    if core is None:
        return 1
    return start_module(core)
+
+
def stop_kernel_modules():
    """Stop the last registered kernel (core) module.

    Returns its stop_module() result, or 1 if no core module exists.
    NOTE(review): only the first CoreModule in reverse order is stopped —
    mirrors start_kernel_modules().
    """
    core = next(
        (m for m in reversed(mods)
         if isinstance(m, seiscomp.kernel.CoreModule)), None)
    if core is None:
        return 1
    return stop_module(core)
+
def detectOS():
    """Detect the operating system from /etc/os-release.

    Returns (name, osID, version, arch) where osID is normalized onto the
    base distribution whose packages it follows (centos/rocky -> rhel,
    raspbian -> debian). Raises if /etc/os-release is missing or lacks
    the ID/VERSION_ID/NAME fields; callers handle that.
    """
    # Derivative distributions mapped onto their package-compatible base.
    baseDistro = {
        'centos': 'rhel',
        'rocky': 'rhel',
        'raspbian': 'debian'
    }

    try:
        arch = platform.machine()
    except BaseException:
        arch = 'x86_64'

    data = {}
    with open('/etc/os-release', 'r') as f:
        for line in f:
            toks = line.split("=")
            if len(toks) == 2:
                data[toks[0].strip().upper()] = toks[1].strip()

    rawID = data['ID'].strip('"')
    osID = baseDistro.get(rawID) or rawID

    version = data['VERSION_ID'].strip('"')
    if osID == 'rhel':
        # Dependency scripts for RHEL are organized by major version only.
        try:
            version = str(math.floor(float(version)))
        except Exception:
            pass

    name = data['NAME'].strip('"')
    return name, osID, version, arch
+
+# ------------------------------------------------------------------------------
+# Commandline action handler
+# ------------------------------------------------------------------------------
def on_setup(args, flags):
    """Run the setup handler of all (or the given) configuration modules.

    The configuration is read from stdin when the 'stdin' flag is set,
    otherwise gathered interactively via seiscomp.setup.Simple. On
    success a var/run/seiscomp.init marker file is created. Returns 0 on
    success, 1 on failure.
    """
    # pylint: disable=W0621
    import seiscomp.setup

    if "stdin" in flags:
        cfg = seiscomp.config.Config()
        if not cfg.readConfig("-"):
            error("invalid configuration from stdin")
            return 1
    else:
        cfg = seiscomp.setup.Simple().run(env)

    retCode = 0

    for mod in config_mods:
        if args and mod.name not in args:
            continue

        try:
            hasSetupHandler = callable(getattr(mod, 'setup'))
        except BaseException:
            hasSetupHandler = False

        if not hasSetupHandler:
            continue

        print("* setup %s" % mod.name)
        if mod.setup(cfg) != 0:
            error("module '%s' failed to setup" % mod.name)
            retCode = 1

    if retCode == 0:
        # Mark the installation as initialized.
        runpath = os.path.join(SEISCOMP_ROOT, "var", "run")
        if not os.path.exists(runpath):
            try:
                os.makedirs(runpath)
            except BaseException:
                error("failed to create directory: %s" % runpath)

        statfile = os.path.join(runpath, "seiscomp.init")
        if not os.path.exists(statfile):
            try:
                open(statfile, "w").close()
            except BaseException:
                error("failed to create status file: %s" % statfile)

    return retCode
+
+
def on_setup_help(_):
    """Describe the 'setup' command."""
    print("Initialize the configuration of all available modules. Each module\n"
          "implements its own setup handler which is called at this point. The\n"
          "initialization takes the installation directory into account and\n"
          "should be repeated when copying the system to another directory.\n"
          "NOTE:\n"
          "Setup might overwrite already made settings with default values.")
    return 0
+
+
def on_shell(_args, _):
    """Run the interactive SeisComP shell. Return 0, or 1 on error."""
    cli = seiscomp.shell.CLI()
    try:
        cli.run(env)
    except Exception as e:
        error(str(e))
        return 1
    return 0
+
+
def on_shell_help(_):
    """Describe the 'shell' command."""
    print("Launches the SeisComP shell, a commandline interface which allows\n"
          "to manage modules configurations and bindings.")
    return 0
+
+
def on_enable(args, _):
    """Enable the given modules for autostart.

    Unknown names and kernel modules are reported but do not abort the
    loop. Returns 0, or 1 when no module name was given.
    """
    if not args:
        error("module name required")
        return 1

    for name in args:
        mod = get_module(name)
        if mod is None:
            error("%s is not available" % name)
        elif isinstance(mod, seiscomp.kernel.CoreModule):
            error("%s is a kernel module and is enabled automatically" % name)
        else:
            env.enableModule(name)
    return 0
+
+
def on_enable_help(_):
    """Describe the 'enable' command."""
    print("Enables all given modules to be started when 'seiscomp start' is\n"
          "invoked without a module list.\n"
          "\n"
          "Examples:\n"
          "seiscomp enable seedlink slarchive")
+
+
def on_disable(args, _):
    """Disable the given modules so a plain 'seiscomp start' skips them.

    Unknown names and kernel modules are reported but do not abort the
    loop. Returns 0, or 1 when no module name was given.
    """
    if not args:
        error("module name required")
        return 1

    for name in args:
        mod = get_module(name)
        if mod is None:
            # BUG FIX: the message previously interpolated the (None)
            # lookup result instead of the requested name, printing
            # "None is not available"; now consistent with on_enable.
            error("%s is not available" % name)
        elif isinstance(mod, seiscomp.kernel.CoreModule):
            error("%s is a kernel module and cannot be disabled" % name)
        else:
            env.disableModule(name)
    return 0
+
+
def on_disable_help(_):
    """Describe the 'disable' command."""
    print("Disables all given modules. See 'enable'.\n"
          "\n"
          "Examples:\n"
          "seiscomp disable seedlink slarchive")
+
+
def on_start(args, _):
    """Start modules.

    Without arguments the kernel modules plus every enabled module are
    started; otherwise only the listed modules. Prints a summary unless
    CSV output is active. Always returns 0.
    """
    cntStarted = 0
    if args:
        for mod in mods:
            if mod.name in args:
                if start_module(mod) == 0:
                    cntStarted += 1
    else:
        if start_kernel_modules() == 0:
            cntStarted += 1
        for mod in mods:
            # Kernel modules have been handled above.
            if isinstance(mod, seiscomp.kernel.CoreModule):
                continue
            # Only modules enabled for autorun are started.
            if env.isModuleEnabled(mod.name) and start_module(mod) == 0:
                cntStarted += 1

    if not useCSV:
        print("Summary: {} modules started".format(cntStarted))

    return 0
+
+
def on_start_help(_):
    """Describe the 'start' command."""
    print("Starts all enabled modules or a list of modules given.\n"
          "\n"
          "Examples:\n"
          "seiscomp start\n"
          "seiscomp start seedlink slarchive")
+
+
def on_stop(args, _):
    """Stop modules in reverse registration order.

    Without arguments every non-kernel module is stopped first and the
    kernel modules last; otherwise only the listed modules. Prints a
    summary unless CSV output is active. Always returns 0.
    """
    cntStopped = 0
    if args:
        for mod in reversed(mods):
            if mod.name in args:
                if stop_module(mod) == 0:
                    cntStopped += 1
    else:
        for mod in reversed(mods):
            # Kernel modules are stopped last, below.
            if isinstance(mod, seiscomp.kernel.CoreModule):
                continue
            if stop_module(mod) == 0:
                cntStopped += 1

        # Stop all kernel modules
        if stop_kernel_modules() == 0:
            cntStopped += 1

    if not useCSV:
        print("Summary: {} modules stopped".format(cntStopped))

    return 0
+
+
def on_stop_help(_):
    """Describe the 'stop' command."""
    print("Stops all enabled modules or a list of modules given.\n"
          "\n"
          "Examples:\n"
          "seiscomp stop\n"
          "seiscomp stop seedlink slarchive")
+
+
def on_restart(args, flags):
    """Equivalent to 'stop' followed by 'start' with the same arguments."""
    for step in (on_stop, on_start):
        step(args, flags)
    return 0
+
+
def on_restart_help(_):
    """Describe the 'restart' command."""
    print("Restarts all enabled modules or a list of modules given.\n"
          "This command is equal to:\n"
          "seiscomp stop {args}\n"
          "seiscomp start {args}\n"
          "\n"
          "Examples:\n"
          "seiscomp restart\n"
          "seiscomp restart seedlink slarchive")
+
+
def on_reload(args, _):
    """Reload the configuration of running modules.

    Without arguments every non-kernel module that should be running is
    reloaded; otherwise exactly the listed modules. Always returns 0.
    """
    if args:
        for mod in mods:
            if mod.name in args:
                mod.reload()
    else:
        for mod in mods:
            # Reload is not supported by kernel modules.
            if isinstance(mod, seiscomp.kernel.CoreModule):
                continue
            if shouldModuleRun(mod.name):
                mod.reload()

    return 0
+
+
def on_reload_help(_):
    """Describe the 'reload' command."""
    print("Reloads all enabled modules or a list of modules given.\n"
          "This operation is module specific and implemented only for some\n"
          "modules.\n"
          "\n"
          "Examples:\n"
          "seiscomp reload\n"
          "seiscomp reload fdsnws")
+
+
def on_check(args, _):
    """Invoke check() on every module that should be running.

    Restricts to the listed modules when arguments are given. Prints a
    summary unless CSV output is active. Always returns 0.
    """
    cntStarted = 0
    selected = [m for m in mods if not args or m.name in args]
    for mod in selected:
        if shouldModuleRun(mod.name):
            cntStarted += 1
            mod.check()

    if not useCSV:
        print("Summary: {} started modules checked".format(cntStarted))

    return 0
+
+
def on_check_help(_):
    """Describe the 'check' command."""
    print("Checks if a started module is still running. If not, it is\n"
          "restarted. If no modules are given, all started modules are\n"
          "checked.\n"
          "\n"
          "Examples:\n"
          "$ seiscomp check seedlink\n"
          "seedlink is already running")
+
+
def on_exec(args, _):
    """Execute *args* as a command within the SeisComP environment.

    Returns the command's exit status from system().
    BUG FIX: the usage-error path previously returned False, which
    sys.exit() treats as exit code 0 (success); return 1 instead,
    consistent with every other command handler.
    """
    if len(args) < 1:
        error("no module name given")
        return 1

    # Change back into the original working dir before running the command.
    env.chback()
    return system(args)
+
+
def on_exec_help(_):
    """Describe the 'exec' command."""
    print("Executes a command like calling a command from commandline.\n"
          "It will setup all paths and execute the command.\n"
          "'seiscomp run' will block until the command terminates.\n"
          "Example:\n"
          "seiscomp exec scolv")
+
+
def on_list(args, _):
    """Print the result of a module query.

    Supported queries: modules, aliases, enabled, disabled, started.
    Returns 0 on success, 1 on a missing or unknown query. Summaries are
    suppressed in CSV mode.
    """
    if len(args) < 1:
        error("expected argument: {modules|aliases|enabled|disabled|started}")
        return 1

    query = args[0]

    if query == "modules":
        found = 0
        for mod in mods:
            if env.isModuleEnabled(mod.name) or \
                    isinstance(mod, seiscomp.kernel.CoreModule):
                state = "enabled"
            else:
                state = "disabled"
            found += 1
            print("%s is %s" % (mod.name, state))

        if not useCSV:
            print("Summary: {} modules reported".format(found))

        return 0

    if query == "aliases":
        # IMPROVED: a context manager replaces open()/close(); previously
        # the file stayed open if printing raised.
        with open(ALIAS_FILE, 'r') as f:
            lines = [line.rstrip() for line in f.readlines()]
        for line in lines:
            if line.lstrip().startswith('#') or not line.strip():
                continue
            toks = [t.strip() for t in line.split('=')]
            # Skip invalid lines
            if len(toks) != 2:
                continue
            if useCSV:
                print("%s;%s" % (toks[0], toks[1]))
            else:
                print("%s -> %s" % (toks[0], toks[1]))
        return 0

    if query == "enabled":
        found = 0
        for mod in mods:
            if env.isModuleEnabled(mod.name) or \
                    isinstance(mod, seiscomp.kernel.CoreModule):
                print(mod.name)
                found += 1

        if not useCSV:
            print("Summary: {} modules enabled".format(found))

        return 0

    if query == "disabled":
        found = 0
        for mod in mods:
            if not env.isModuleEnabled(mod.name) and \
                    not isinstance(mod, seiscomp.kernel.CoreModule):
                print(mod.name)
                found += 1

        if not useCSV:
            print("Summary: {} modules disabled".format(found))

        return 0

    if query == "started":
        found = 0
        for mod in mods:
            if shouldModuleRun(mod.name):
                print(mod.name)
                found += 1

        if not useCSV:
            print("Summary: {} modules started".format(found))

        return 0

    error(
        "wrong argument: {modules|aliases|enabled|disabled|started} expected")
    return 1
+
+
def on_list_help(_):
    """Describe the 'list' command."""
    print("Prints the result of a query. 5 queries are currently supported:\n"
          " modules: lists all existing modules\n"
          " aliases: lists all existing aliases\n"
          " enabled: lists all enabled modules\n"
          " disabled: lists all disabled modules\n"
          " started: lists all started modules\n"
          "\n"
          "Examples:\n"
          "$ seiscomp list aliases\n"
          "l1autopick -> scautopick")
+
+
def on_status(args, _):
    """Print the run status of modules.

    'status enabled' limits output to enabled modules, 'status started'
    to modules expected to run; otherwise all (or the listed) modules
    are reported. Always returns 0.
    """
    found = 0
    query = args[0] if args else None

    if query == "enabled":
        for mod in mods:
            if env.isModuleEnabled(mod.name) or \
                    isinstance(mod, seiscomp.kernel.CoreModule):
                mod.status(shouldModuleRun(mod.name))
                found += 1

        if not useCSV:
            print("Summary: {} modules enabled".format(found))

        return 0

    if query == "started":
        for mod in mods:
            if shouldModuleRun(mod.name):
                mod.status(shouldModuleRun(mod.name))
                found += 1

        if not useCSV:
            print("Summary: {} modules started".format(found))

        return 0

    for mod in mods:
        if mod.name in args or not args:
            mod.status(shouldModuleRun(mod.name))
            found += 1

    if not useCSV:
        print("Summary: {} modules reported".format(found))
    return 0
+
+
def on_status_help(_):
    """Describe the 'status' command."""
    lines = [
        "Prints the status of ",
        " * all modules",
        " * all enabled modules",
        " * all started modules",
        " * a list of modules",
        "and gives a warning if a module should run but doesn't.",
        "This command supports csv formatted output via '--csv' switch.",
        "",
        "Examples:",
        "$ seiscomp status started",
        "$ seiscomp status enabled",
        "scmaster is not running [WARNING]",
        "$ seiscomp status scautopick",
        "scautopick is not running",
        "$ seiscomp --csv status scautopick",
        "scautopick;0;0;0",
        "",
        "CSV format:",
        " column 1: module name",
        " column 2: running flag",
        " column 3: should run flag",
        " column 4: enabled flag",
    ]
    print("\n".join(lines))
+
+
def on_print(args, _):
    """Print crontab entries ('crontab') or shell environment ('env').

    Returns 0 on success, 1 on a missing or unknown argument.
    """
    if len(args) < 1:
        error("expected argument: {crontab|env}")
        return 1

    if args[0] == "crontab":
        # Check every three minutes that registered modules are alive.
        print("*/3 * * * * %s check >/dev/null 2>&1" %
              os.path.join(env.SEISCOMP_ROOT, "bin", "seiscomp"))
        for mod in mods:
            mod.printCrontab()
        return 0

    if args[0] == "env":
        print('export SEISCOMP_ROOT="%s"' % SEISCOMP_ROOT)
        print('export PATH="%s:$PATH"' % BIN_PATH)
        print('export %s="%s:$%s"' %
              (SysLibraryPathVar, get_library_path(), SysLibraryPathVar))
        if sys.platform == "darwin":
            print('export %s="%s:$%s"' %
                  (SysFrameworkPathVar, get_framework_path(),
                   SysFrameworkPathVar))

        print('export PYTHONPATH="%s:$PYTHONPATH"' % PYTHONPATH)
        print('export MANPATH="%s:$MANPATH"' % MANPATH)
        print('source "%s/share/shell-completion/seiscomp.bash"' %
              SEISCOMP_ROOT)
        # Optional per-host environment overrides.
        hostenv = os.path.join(SEISCOMP_ROOT, "etc", "env", "by-hostname",
                               socket.gethostname())
        if os.path.isfile(hostenv):
            print('source %s' % hostenv)
        return 0

    error("wrong argument: {crontab|env} expected")
    return 1
+
+
def on_print_help(_):
    """Describe the 'print' command."""
    print("seiscomp print {crontab|env}\n"
          " crontab: prints crontab entries of all registered or given modules.\n"
          " env: prints environment variables necessary to run SeisComP modules.\n"
          "\n"
          "Examples:\n"
          "Source SC environment into current bash session\n"
          "$ eval $(seiscomp/bin/seiscomp print env)")
+
+
def on_install_deps_linux(args, _):
    """Install OS dependency packages on a supported Linux distribution.

    Looks up install scripts under share/deps/<release>/<version> and runs
    each requested package script via sudo. Returns 0 on success, 1 on
    any failure (unknown distribution, unsupported version, missing
    package script, or a failing install script).
    """
    try:
        name, release, version, arch = detectOS()
    except BaseException as err:
        print("*********************************************************************")
        print("seiscomp was not able to figure out the installed distribution")
        print("You need to check the documentation for required packages and install")
        print("them manually.")
        print("Error: {}".format(err))
        print("*********************************************************************")

        return 1

    print("Distribution: {}-{}-{}({}-{})".format(name, version, arch, release, version))

    # Try the most specific version directory first, then strip trailing
    # version components until a matching deps directory exists.
    for n in range(version.count('.') + 1):
        ver = version.rsplit('.', n)[0]
        script_dir = os.path.join(
            env.SEISCOMP_ROOT, "share", "deps", release.lower(), ver.lower())
        if os.path.exists(script_dir):
            break

    if not os.path.exists(script_dir):
        print("*********************************************************************")
        print("Sorry, the installed distribution is not supported.")
        print("You need to check the documentation for required packages and install")
        print("them manually.")
        print("*********************************************************************")
        return 1

    for pkg in args:
        script = os.path.join(script_dir, "install-" + pkg + ".sh")
        if not os.path.exists(script):
            error("no handler available for package '%s'" % pkg)
            return 1
        if system(["sudo", "sh", script]) != 0:
            error("installation failed")
            return 1

    return 0
+
+
def on_install_deps(args, flags):
    """Dispatch dependency installation to the platform-specific handler.

    Returns 0 on success, 1 on usage error or unsupported platform.
    """
    if not args:
        error("expected package list: PKG1 [PKG2 [..]]")
        print("Example: seiscomp install-deps base gui mysql-server")
        print("For a list of available packages issue: seiscomp help install-deps")
        # BUG FIX: previously fell through with an empty package list and
        # exited successfully; report the usage error to the caller.
        return 1

    if sys.platform.startswith("linux"):
        return on_install_deps_linux(args, flags)

    error("unsupported platform")
    print("*********************************************************************")
    print("The platform you are currently running on is not supported to install")
    print("dependencies automatically.")
    print("You need to check the documentation for required packages and install")
    print("them manually.")
    print("*********************************************************************")
    return 1
+
+
def on_install_deps_help(_):
    """Describe the 'install-deps' command."""
    print("seiscomp install-deps PKG1 [PKG2 [..]]\n"
          "Installs OS dependencies to run SeisComP. This requires either a 'sudo'\n"
          "or root account. Available packages are:\n"
          " base: basic packages required by all installations\n"
          " gui: required by graphical user interfaces, e.g. on workstations\n"
          " [mysql,mariadb,postgresql]-server:\n"
          " database management system required by the machine running\n"
          " the SeisComP messaging system (scmaster)\n"
          " fdsnws: required for data sharing via the FDSN web services")

    return 0
+
+
def on_update_config(args, _):
    """Synchronize etc/*.cfg and bindings into each module's native config.

    Without arguments every configuration module is processed; otherwise
    only the named ones. A module may delegate to another module via
    updateConfigProxy(); such proxy targets are queued and processed in a
    later pass of the outer loop. Returns 0 on success, 1 on the first
    failure.
    """
    kernelModsStarted = False
    # Module name -> True once its updateConfig() has run; proxy targets
    # are inserted with False until they get configured themselves.
    configuredMods = {}

    listOfMods = args
    if not listOfMods:
        listOfMods = []
        for mod in config_mods:
            listOfMods.append(mod.name)

    while len(listOfMods) > 0:
        for mod in config_mods:
            if mod.name in listOfMods:
                # Some modules need the kernel (messaging) up before they
                # can write their configuration; start it lazily once.
                if not kernelModsStarted and mod.requiresKernelModules():
                    print("* starting kernel modules")
                    start_kernel_modules()
                    kernelModsStarted = True
                print("* configure %s" % mod.name)

                proxy = None

                try:
                    proxy = mod.updateConfigProxy()
                    if is_string(proxy):
                        # Remember the proxy target for a later pass, but
                        # do not overwrite an already-configured entry.
                        configuredMods.setdefault(proxy, False)
                except Exception:
                    pass

                if proxy is None:
                    result = mod.updateConfig()

                    # updateConfig() is expected to return an int-like
                    # status; anything else is a module bug.
                    try:
                        error_code = int(result)
                    except ValueError:
                        error("unexpected return type when updating "
                              "configuration of %s" % mod.name)
                        return 1

                    if error_code != 0:
                        error(
                            "updating configuration for %s failed" % mod.name)
                        return 1

                configuredMods[mod.name] = True

        listOfMods = []
        # Collect all unconfigured but indirectly requested mods
        # NOTE(review): a proxy name that matches no config module would
        # keep this outer loop spinning forever — confirm proxies always
        # name real configuration modules.
        for name, configured in configuredMods.items():
            if not configured:
                listOfMods.append(name)

    return 0
+
+
def on_update_config_help(_):
    """Describe the 'update-config' command."""
    print("Updates the configuration of all available modules. This command\n"
          "will convert the etc/*.cfg to the modules native configuration\n"
          "including its bindings.")
    return 0
+
+
def on_alias(args, _):
    """Create or remove an alias (symlinked clone) of an application.

    'create ALIAS APP' registers ALIAS in etc/descriptions/aliases and
    symlinks the binary, the default configuration and the init script.
    'remove ALIAS' undoes the registration and links; with interactive
    mode enabled it also offers to delete module/binding configuration.
    Returns 0 on success, 1 on error.
    """
    if len(args) < 2:
        error("expected arguments: {create|remove} ALIAS_NAME APP_NAME")
        return 1

    aliasName = args[1]

    if args[0] == "create":
        if len(args) != 3:
            error("expected two arguments for create: ALIAS_NAME APP_NAME")
            return 1

        # Look up the application to be aliased.
        mod = None
        for module in mods:
            if module.name == args[2]:
                mod = module
                break

        if not mod:
            error("module '%s' not found" % args[2])
            return 1

        # Modules opt in to aliasing; absence of the hook means "no".
        supportsAliases = False
        try:
            supportsAliases = mod.supportsAliases()
        except BaseException:
            pass

        if not supportsAliases:
            error("module '%s' does not support aliases" % args[2])
            return 1

        # Locate the target binary (bin/ preferred over sbin/).
        # NOTE(review): paths are relative — this relies on the earlier
        # env.chroot() into SEISCOMP_ROOT; confirm.
        mod2 = args[2]
        if os.path.exists(os.path.join("bin", mod2)):
            mod1 = os.path.join("bin", aliasName)
        elif os.path.exists(os.path.join("sbin", mod2)):
            mod1 = os.path.join("sbin", aliasName)
        else:
            # NOTE(review): format string has a %s placeholder but no
            # argument is supplied to the message — verify intent.
            error("no %s binary found (neither bin nor sbin)")
            return 1

        # create alias line in etc/descriptions/aliases
        if not os.path.exists(DESC_PATH):
            try:
                os.makedirs(DESC_PATH)
            except Exception:
                error("failed to create directory: %s" % DESC_PATH)
                return 1

        # Scan the aliases file; collect valid lines and detect whether
        # the alias is already registered.
        has_alias = False
        lines = []
        new_lines = []
        try:
            f = open(ALIAS_FILE, 'r')
            lines = [line.rstrip() for line in f.readlines()]
            for line in lines:
                if line.lstrip().startswith('#') or not line.strip():
                    # Keep comments or empty lines
                    new_lines.append(line)
                    continue
                toks = [t.strip() for t in line.split('=')]
                # Remove invalid lines
                if len(toks) != 2:
                    continue
                if toks[0] == aliasName:
                    has_alias = True
                    break

                new_lines.append(line)
            f.close()
        except BaseException:
            pass

        if has_alias:
            warning("%s is already registered as alias for %s in " \
                "$SEISCOMP_ROOT/etc/descriptions/aliases" % (aliasName, toks[1]))
            warning(" + do not register again but trying to link the required files")
        else:
            print(
                "Registered alias '%s' in $SEISCOMP_ROOT/etc/descriptions/aliases" %
                (aliasName))

        # Check if target exists already
        if os.path.exists(os.path.join(SEISCOMP_ROOT, mod1)):
            warning(
                "link '%s' to '%s' exists already in %s/bin/" %
                (aliasName, mod2, SEISCOMP_ROOT))
            warning(" + do not link again")

        try:
            f = open(ALIAS_FILE, 'w')
        except BaseException:
            error("failed to open/create alias file: %s" % ALIAS_FILE)
            return 1

        # NOTE(review): when the alias already existed the scan above
        # stopped at the match, so lines after it are dropped and the
        # entry is re-appended here — confirm this rewrite is intended.
        new_lines.append("%s = %s" % (aliasName, args[2]))

        f.write("\n".join(new_lines) + "\n")
        f.close()

        # create symlink of defaults from etc/defaults/mod1.cfg to etc/defaults/mod2.cfg
        # use relative path to default_cfg2
        cwdAlias = os.getcwd()
        os.chdir(os.path.join(SEISCOMP_ROOT, "etc", "defaults"))
        default_cfg1 = aliasName + ".cfg"
        default_cfg2 = args[2] + ".cfg"
        if os.path.exists(default_cfg2):
            print("Linking default configuration: %s -> %s" %
                  (default_cfg2, default_cfg1))
            # - first: remove target
            try:
                os.remove(default_cfg1)
            except BaseException:
                pass
            # create symlink
            os.symlink(os.path.relpath(default_cfg2), default_cfg1)
        else:
            print("No default configuration to link")
        # return to initial directory
        os.chdir(cwdAlias)

        # create symlink from bin/mod1 to bin/mod2
        # - first: remove target
        try:
            os.remove(os.path.join(SEISCOMP_ROOT, mod1))
        except BaseException:
            pass
        print("Creating app symlink: %s -> %s" % (mod2, mod1))
        os.symlink(mod2, os.path.join(SEISCOMP_ROOT, mod1))

        # create symlink from etc/init/mod1.py to etc/init/mod2.py
        cwdAlias = os.getcwd()
        os.chdir(os.path.join(SEISCOMP_ROOT, "etc", "init"))
        init1 = aliasName + ".py"
        init2 = args[2] + ".py"
        print("Linking init script: %s -> %s" % (init2, init1))
        # - first: remove target
        try:
            os.remove(init1)
        except BaseException:
            pass
        # create symlink with relative path
        os.symlink(os.path.relpath(init2), init1)
        # return to initial directory
        os.chdir(cwdAlias)

        return 0

    if args[0] == "remove":
        if len(args) != 2:
            error("expected one argument for remove: alias-name")
            return 1

        print("Removing alias '%s'" % aliasName)
        # check and remove alias line in etc/descriptions/aliases
        has_alias = False
        lines = []
        new_lines = []
        try:
            f = open(ALIAS_FILE, 'r')
            lines = [line.rstrip() for line in f.readlines()]
            for line in lines:
                if line.lstrip().startswith('#') or not line.strip():
                    # Keep comments or empty lines
                    new_lines.append(line)
                    continue
                toks = [t.strip() for t in line.split('=')]
                # Remove invalid lines
                if len(toks) != 2:
                    continue
                if toks[0] == aliasName:
                    has_alias = True
                else:
                    new_lines.append(line)
            f.close()
        except BaseException:
            pass

        if not has_alias:
            print(" + {} is not defined as an alias".format(aliasName))
            if not interactiveMode:
                print(" + remove related configuration with '--interactive'")
            # If nothing would change in the aliases file, stop here.
            if len(lines) == len(new_lines):
                return 1

        try:
            f = open(ALIAS_FILE, 'w')
        except BaseException:
            error(" + failed to open/create alias file: %s" % ALIAS_FILE)
            return 1

        if len(lines) > 0:
            f.write("\n".join(new_lines) + "\n")
        f.close()

        # Unregistered alias: only continue with file removal when the
        # user asked for interactive cleanup.
        if not has_alias:
            if not interactiveMode:
                return 1

        # remove symlink from bin/mod1
        if os.path.exists(os.path.join("bin", aliasName)):
            sym_link = os.path.join("bin", aliasName)
        elif os.path.exists(os.path.join("sbin", aliasName)):
            sym_link = os.path.join("sbin", aliasName)
        else:
            sym_link = ""

        if sym_link:
            print(" + removing app symlink: %s" % sym_link)
            try:
                os.remove(os.path.join(SEISCOMP_ROOT, sym_link))
            except BaseException:
                pass

        # remove symlink from etc/init/mod1.py
        init_scr = os.path.join("etc", "init", aliasName + ".py")
        print(" + removing init script: %s" % init_scr)
        try:
            os.remove(os.path.join(SEISCOMP_ROOT, init_scr))
        except BaseException:
            pass

        # delete defaults etc/defaults/mod1.cfg
        default_cfg = os.path.join("etc", "defaults", aliasName + ".cfg")
        print(" + removing default configuration: {}/{}"
              .format(SEISCOMP_ROOT, default_cfg))
        try:
            os.remove(os.path.join(SEISCOMP_ROOT, default_cfg))
        except BaseException as e:
            error(" + could not remove %s" % e)

        if not interactiveMode:
            warning("No other configuration removed for '%s' - interactive"
                    " removal is supported by '--interactive'" % aliasName)
            return 0

        # test module configuration files
        # SYSTEMCONFIGDIR
        cfg = os.path.join("etc", aliasName + ".cfg")
        if os.path.isfile(cfg):
            print(" + found module configuration file: {}/{}"
                  .format(SEISCOMP_ROOT, cfg))
            answer = getInput(" + do you wish to remove it?", 'n', 'yn')
            if answer == "y":
                try:
                    os.remove(cfg)
                except Exception as e:
                    error(" + could not remove '%s' - try manually" % e)

        # CONFIGDIR
        cfg = os.path.join(
            os.path.expanduser("~"),
            ".seiscomp",
            aliasName + ".cfg")
        if os.path.isfile(cfg):
            print(" + found module configuration file: {}".format(cfg))
            answer = getInput(" + do you wish to remove it?", 'n', 'yn')
            if answer == "y":
                try:
                    os.remove(cfg)
                except Exception as e:
                    error(" + could not remove the file: %s - try manually" % e)

        # test module binding files
        bindingDir = os.path.join(SEISCOMP_ROOT, "etc", "key", aliasName)
        if os.path.exists(bindingDir):
            print(" + found binding directory: {}".format(bindingDir))
            answer = getInput(" + do you wish to remove it?", 'n', 'yn')
            if answer == "y":
                try:
                    shutil.rmtree(bindingDir)
                except Exception as e:
                    error(" + could not remove the directory: %s - try manually" % e)

        # test key files
        keyDir = os.path.join(SEISCOMP_ROOT, 'etc', 'key')
        dirContent = os.listdir(keyDir)
        keyFiles = []
        print(" + testing key files")
        for f in dirContent:
            if not os.path.isfile(os.path.join(keyDir, f)) or \
                    not f.startswith("station_"):
                continue

            keyFile = os.path.join(keyDir, f)
            with open(keyFile, 'r') as fp:
                # Read all lines in the file one by one
                for line in fp:
                    # check if the line starts with the module name
                    if line.startswith(aliasName):
                        keyFiles.append(keyFile)
                        print(" + found binding for '{}' in: {}".format(aliasName, keyFile))

        if keyFiles:
            print(" + found {} bindings for '{}' in key files".format(len(keyFiles), aliasName))
            question = " + remove all '{}' bindings from key files?".format(aliasName)
            answer = getInput(question, 'n', 'yn')
            if answer == "y":
                shell = seiscomp.shell.CLI(env)
                shell.commandRemove(["module", aliasName, "*.*"])
        else:
            print(" + found no key files")

        return 0

    error("Wrong command '%s': expected 'create' or 'remove'" % args[0])
    return 1
+
+
def on_alias_help(_):
    """Describe the 'alias' command."""
    print("seiscomp alias {create|remove} ALIAS_NAME APP_NAME\n"
          "Creates/removes symlinks to applications. Symlinks to symlinks are not allowed.\n"
          "\n"
          "Examples:\n"
          "$ seiscomp alias create scautopick2 scautopick\n"
          "Copy default configuration: etc/defaults/scautopick.cfg -> etc/defaults/scautopick2.cfg\n"
          "Create app symlink: scautopick -> bin/scautopick2\n"
          "Copy init script: etc/init/scautopick.py -> etc/init/scautopick2.py\n"
          "\n"
          "$ seiscomp alias remove scautopick2\n"
          "Remove default configuration: etc/defaults/scautopick2.cfg\n"
          "Remove app symlink: bin/scautopick2\n"
          "Remove init script: etc/init/scautopick2.py")
+
+
# Commands accepted on the command line; 'seiscomp help CMD' dispatches to
# the matching on_CMD_help handler, 'seiscomp CMD' to on_CMD.
allowed_actions = [
    "install-deps",
    "setup",
    "shell",
    "enable",
    "disable",
    "start",
    "stop",
    "restart",
    "reload",
    "check",
    "status",
    "list",
    "exec",
    "update-config",
    "alias",
    "print",
    "help"
]


# Define all actions that do not need locking of seiscomp
actions_without_lock = [
    # "install-deps",
    "help",
    "list",
    "exec",
    "print"
]
+
+
def on_help(args, _):
    """Print general usage, or the help of the command named in args[0].

    Returns 0, or 1 when no help handler exists for the command.
    """
    if not args:
        print("Name:")
        print(" seiscomp - Load the environment of the SeisComP installation from "
              "where seiscomp is executed and run a command")
        print("\nSynopsis:")
        print(" seiscomp [flags] [commands] [arguments]")
        print("\nFlags:")
        print(" --asroot Allow running a command as root")
        print(" --csv Print output as csv in machine-readable format")
        print(" -i, [--interactive] Interactive mode: Allow deleting files "
              "interactively when removing aliases")
        print(" --wait arg Define a timeout in seconds for acquiring the seiscomp "
              "lock file, e.g. `seiscomp --wait 10 update-config`")
        print("\nAvailable commands:")
        for helpAction in allowed_actions:
            print(" %s" % helpAction)

        print("\nUse 'help [command]' to get more help about a command")
        print("\nExamples:")
        print(" seiscomp help update-config Show help for update-config")
        print(" seiscomp update-config Run update-config for allmodules")
        print(" seiscomp update-config trunk Run update-config for all trunk modules")
        print(" seiscomp update-config scautopick Run update-config for scautopick")
        return 0

    cmd = args[0]
    handler_name = "on_" + cmd.replace("-", "_") + "_help"
    func = globals().get(handler_name)
    if func is None:
        print("Sorry, no help available for %s" % cmd)
        return 1
    func(args[1:])
    return 0
+
+
def run_action(runAction, args, flags):
    """Dispatch to the on_<action> handler and return its result.

    Any exception (including an unknown action) is reported via error()
    and yields exit status 2; a traceback is printed when the 'debug'
    flag is set.
    """
    try:
        handler = globals()["on_" + runAction.replace("-", "_")]
        return handler(args, flags)
    except Exception as exc:
        error("command '%s' failed: %s" % (runAction, str(exc)))
        if "debug" in flags:
            for frame_line in traceback.format_exception(*sys.exc_info()):
                sys.stderr.write(frame_line)
        return 2
+
+
def on_csv_help(_):
    """Describe the '--csv' flag."""
    print("If --csv is prepended to a usual command the internal output is\n"
          "set to comma separated values. The only command that currently\n"
          "uses this output format is 'status'.\n"
          "\n"
          "Example:\n"
          "seiscomp --csv status")
    return 0
+
+
+# ------------------------------------------------------------------------------
+# Check command line
+# ------------------------------------------------------------------------------
+useCSV = False
+asRoot = False
+lockTimeout = None
+interactiveMode = False
+
+argv = sys.argv[1:]
+argflags = []
+
+# Check for flags
+while argv:
+ if argv[0] == "--csv":
+ useCSV = True
+ argv = argv[1:]
+ elif argv[0] == "--asroot":
+ asRoot = True
+ argv = argv[1:]
+ if argv[0] == "--interactive" or argv[0] == "-i":
+ interactiveMode = True
+ argv = argv[1:]
+ elif argv[0] == "--wait":
+ argv = argv[1:]
+ if not argv:
+ print("--wait expects an integer value in seconds")
+ sys.exit(1)
+ try:
+ lockTimeout = int(argv[0])
+ except BaseException:
+ print("Wait timeout is not an integer: %s" % argv[0])
+ sys.exit(1)
+ if lockTimeout < 0:
+ print("Wait timeout must be positive: %s" % argv[0])
+ sys.exit(1)
+ argv = argv[1:]
+ elif argv[0].startswith("--"):
+ argflags.append(argv[0][2:])
+ argv = argv[1:]
+ else:
+ break
+
+if len(argv) < 1:
+ print("seiscomp [flags] {%s} [args]" % "|".join(allowed_actions))
+ print("\nUse 'seiscomp help' to get more help")
+ sys.exit(1)
+
+action = argv[0]
+arguments = argv[1:]
+
+if action not in allowed_actions:
+ print("seiscomp [flags] {%s} [args]" % "|".join(allowed_actions))
+ sys.exit(1)
+
+if os.getuid() == 0 and not asRoot and action != "install-deps":
+ print("Running 'seiscomp' as root is dangerous. Use --asroot only if you")
+ print("know exactly what you are doing!")
+ sys.exit(1)
+
+# ------------------------------------------------------------------------------
+# Initialize the environment
+# ------------------------------------------------------------------------------
+
+# Resolve symlinks to files (if any)
+if os.path.islink(sys.argv[0]):
+ # Read the link target
+ target = os.readlink(sys.argv[0])
+ # Is the target an absolute path then take it as is
+ if os.path.isabs(target):
+ sys.argv[0] = target
+ # Otherwise join the dirname of the script with the target
+ # to get the semi-real path of the seiscomp script. Semi-real
+ # refers to the fact that symlinks are not completely resolved
+ # and why the usage of os.path.realpath is avoided. If the
+ # seiscomp directory itself is a symlink it should be preserved.
+ else:
+ sys.argv[0] = os.path.join(os.path.dirname(sys.argv[0]), target)
+
+# Guess SEISCOMP_ROOT from path of called script, directory links are not
+# resolved allowing to create separate SeisComP environments
+if os.path.isabs(sys.argv[0]):
+ root_path = sys.argv[0]
+else:
+ cwd = os.getenv('PWD')
+ if cwd is None:
+ cwd = os.getcwd()
+ root_path = os.path.join(cwd, sys.argv[0])
+
+SEISCOMP_ROOT = os.path.dirname(os.path.dirname(os.path.normpath(root_path)))
+INIT_PATH = os.path.join(SEISCOMP_ROOT, "etc", "init")
+DESC_PATH = os.path.join(SEISCOMP_ROOT, "etc", "descriptions")
+ALIAS_FILE = os.path.join(DESC_PATH, "aliases")
+BIN_PATH = os.path.join(SEISCOMP_ROOT, "bin")
+SBIN_PATH = os.path.join(SEISCOMP_ROOT, "sbin")
+PYTHONPATH = os.path.join(SEISCOMP_ROOT, "lib", "python")
+MANPATH = os.path.join(SEISCOMP_ROOT, "share", "man")
+LD_LIBRARY_PATH = os.path.join(SEISCOMP_ROOT, "lib")
+DYLD_FALLBACK_FRAMEWORK_PATH = os.path.join(SEISCOMP_ROOT, "lib", "3rd-party")
+
# Run another process with proper LD_LIBRARY_PATH set otherwise the dynamic
# linker will not find dependent SC3 libraries
isWrapped = False
try:
    if os.environ["SEISCOMP_WRAP"] == "TRUE":
        isWrapped = True
except BaseException:
    pass


# Setup signal handler
#signal.signal(signal.SIGTERM, sigterm_handler)

if not isWrapped:
    # Prepend the SeisComP paths to the inherited environment; the bare
    # assignment in each except branch covers an unset variable.
    try:
        os.environ["PATH"] = BIN_PATH + ":" + os.environ["PATH"]
    except BaseException:
        os.environ["PATH"] = BIN_PATH

    try:
        os.environ[SysLibraryPathVar] = get_library_path() + ":" + \
            os.environ[SysLibraryPathVar]
    except BaseException:
        os.environ[SysLibraryPathVar] = get_library_path()

    if sys.platform == "darwin":
        os.environ[SysFrameworkPathVar] = get_framework_path()

    try:
        os.environ["PYTHONPATH"] = PYTHONPATH + ":" + os.environ["PYTHONPATH"]
    except BaseException:
        os.environ["PYTHONPATH"] = PYTHONPATH
    try:
        os.environ["MANPATH"] = MANPATH + ":" + os.environ["MANPATH"]
    except BaseException:
        os.environ["MANPATH"] = MANPATH

    # Guard so the re-executed child does not wrap itself again.
    os.environ["SEISCOMP_WRAP"] = "TRUE"

    # Re-run this very command as a child with the prepared environment
    # and propagate its exit status.
    sys.exit(system(sys.argv))
+
# Register local lib/python in SEARCH PATH
sys.path.insert(0, PYTHONPATH)

# Create environment which supports queries for various SeisComP
# directoris and sets PATH, LD_LIBRARY_PATH and PYTHONPATH
env = seiscomp.kernel.Environment(SEISCOMP_ROOT)
env.setCSVOutput(useCSV)

# Check for lock file
# isChild is True when this process already holds the lock (re-executed
# below under run_with_lock) or when the action needs no lock at all.
isChild = False

if action in actions_without_lock:
    isChild = True
else:
    try:
        isChild = os.environ["SEISCOMP_LOCK"] == "TRUE"
    except KeyError:
        pass

if not isChild:
    # Acquire the installation-wide lock, then re-execute this command
    # under run_with_lock so the lock is held for its whole lifetime.
    if not env.tryLock("seiscomp", lockTimeout):
        error("Could not get lock %s - is another process using it?" %
              env.lockFile("seiscomp"))
        sys.exit(1)

    os.environ["SEISCOMP_LOCK"] = "TRUE"
    exitcode = system(
        ["run_with_lock", "-q", env.lockFile("seiscomp")] + sys.argv)
    sys.exit(exitcode)
+
# Change into SEISCOMP_ROOT directory. The env instance will change
# back into the current working directory automatically if destroyed.
env.chroot()

# Commands that do not need the init scripts loaded.
# BUG FIX: 'arguments' is a list, so the former comparison
# (arguments == "env") was always False and 'seiscomp print env'
# loaded every init module unnecessarily.
simpleCommand = (action == "install-deps") or \
    (action == "print" and arguments[:1] == ["env"])

if not simpleCommand:
    config_mods = load_init_modules(INIT_PATH)
    # mods receives every module except pure configuration modules.
    mods = []
    for m in config_mods:
        if m.isConfigModule:
            continue
        mods.append(m)

sys.exit(run_action(action, arguments, argflags))
diff --git a/bin/seiscomp-python b/bin/seiscomp-python
new file mode 100755
index 0000000..45b803e
--- /dev/null
+++ b/bin/seiscomp-python
@@ -0,0 +1,19 @@
#!/bin/sh
#
# This is a shell script that executes the Python interpreter as
# configured using cmake.
#
# In order to use this in your Python programs use this
# shebang line:

#!/usr/bin/env seiscomp-python

# Please note that this wrapper does *not* set the environment
# variables for you. To ensure that you run your script in the
# proper environment, please use 'seiscomp exec'. Alternatively
# you can also set your environment variables according to the
# output of 'seiscomp print env'.

python_executable="/usr/bin/python3"

# Quote the interpreter path so an installation prefix containing
# whitespace does not break word splitting.
exec "$python_executable" "$@"
diff --git a/bin/sh2proc b/bin/sh2proc
new file mode 100755
index 0000000..891eb78
--- /dev/null
+++ b/bin/sh2proc
@@ -0,0 +1,884 @@
+#!/usr/bin/env seiscomp-python
+# -*- coding: utf-8 -*-
+############################################################################
+# Copyright (C) GFZ Potsdam #
+# All rights reserved. #
+# #
+# GNU Affero General Public License Usage #
+# This file may be used under the terms of the GNU Affero #
+# Public License version 3.0 as published by the Free Software Foundation #
+# and appearing in the file LICENSE included in the packaging of this #
+# file. Please review the following information to ensure the GNU Affero #
+# Public License version 3.0 requirements will be met: #
+# https://www.gnu.org/licenses/agpl-3.0.html. #
+# #
+# Author: Alexander Jaeger, Stephan Herrnkind, #
+# Lukas Lehmann, Dirk Roessler# #
+# Email: herrnkind@gempa.de #
+############################################################################
+
+
+import seiscomp.client, seiscomp.core, seiscomp.datamodel, seiscomp.io, seiscomp.logging, seiscomp.math
+from time import strptime
+import sys
+import traceback
+
# Date/time layouts accepted by SH2Proc.parseTime() for 'onset time' and
# 'origin time' values; tried in order (with, then without fractional
# seconds).
TimeFormats = [
    '%d-%b-%Y_%H:%M:%S.%f',
    '%d-%b-%Y_%H:%M:%S'
]


# SC3 has more event types available in the datamodel
# Mapping of Seismic Handler 'event type' strings to SeisComP datamodel
# event type enumerations; unknown strings raise KeyError at the lookup
# site and are logged there.
EventTypes = {
    'teleseismic quake': seiscomp.datamodel.EARTHQUAKE,
    'local quake': seiscomp.datamodel.EARTHQUAKE,
    'regional quake': seiscomp.datamodel.EARTHQUAKE,
    'quarry blast': seiscomp.datamodel.QUARRY_BLAST,
    'nuclear explosion': seiscomp.datamodel.NUCLEAR_EXPLOSION,
    'mining event': seiscomp.datamodel.MINING_EXPLOSION
}
+
+
def wfs2Str(wfsID):
    """Render a waveform stream id as 'NET.STA.LOC.CHA'."""
    parts = (wfsID.networkCode(), wfsID.stationCode(),
             wfsID.locationCode(), wfsID.channelCode())
    return '.'.join('%s' % part for part in parts)
+
+
+###############################################################################
+class SH2Proc(seiscomp.client.Application):
+
+ ###########################################################################
+ def __init__(self):
+ seiscomp.client.Application.__init__(self, len(sys.argv), sys.argv)
+ self.setMessagingEnabled(True)
+ self.setDatabaseEnabled(True, True)
+ self.setLoadInventoryEnabled(True)
+ self.setLoadConfigModuleEnabled(True)
+ self.setDaemonEnabled(False)
+
+ self.inputFile = '-'
+
+ ###########################################################################
+ def initConfiguration(self):
+ if not seiscomp.client.Application.initConfiguration(self):
+ return False
+
+ # If the database connection is passed via command line or configuration
+ # file then messaging is disabled. Messaging is only used to get
+ # the configured database connection URI.
+ if self.databaseURI() != '':
+ self.setMessagingEnabled(False)
+ else:
+ # A database connection is not required if the inventory is loaded
+ # from file
+ if not self.isInventoryDatabaseEnabled():
+ self.setMessagingEnabled(False)
+ self.setDatabaseEnabled(False, False)
+
+ return True
+
+ ##########################################################################
    def printUsage(self):
        """Print usage, the generic application options and examples."""

        print('''Usage:
 sh2proc [options]

Convert Seismic Handler event data to SeisComP XML format''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Convert the Seismic Handler file shm.evt to SCML. Receive the database
connection to read inventory and configuration information from messaging
 sh2proc shm.evt

Read Seismic Handler data from stdin. Provide inventory and configuration in XML
 cat shm.evt | sh2proc --inventory-db=inventory.xml --config-db=config.xml
''')
+
+ ##########################################################################
+ def validateParameters(self):
+ if not seiscomp.client.Application.validateParameters(self):
+ return False
+
+ for opt in self.commandline().unrecognizedOptions():
+ if len(opt) > 1 and opt.startswith('-'):
+ continue
+
+ self.inputFile = opt
+ break
+
+ return True
+
+ ###########################################################################
    def loadStreams(self):
        """Populate self.streams: station code -> {component letter -> WaveformStreamID}.

        Primary source is the config module (per-station detecStream /
        detecLocid bindings); if no config module is present, fall back to
        scanning the inventory and taking the first stream whose epoch
        matches the current time.
        """
        now = seiscomp.core.Time.GMT()
        inv = seiscomp.client.Inventory.Instance()

        self.streams = {}

        # try to load streams by detecLocid and detecStream
        mod = self.configModule()
        if mod is not None and mod.configStationCount() > 0:
            seiscomp.logging.info('loading streams using detecLocid and detecStream')
            for i in range(mod.configStationCount()):
                cfg = mod.configStation(i)
                net = cfg.networkCode()
                sta = cfg.stationCode()
                if sta in self.streams:
                    # Station code must be unique: Seismic Handler files
                    # identify streams by station code only.
                    seiscomp.logging.warning(
                        'ambiguous stream id found for station %s.%s' % (net, sta))
                    continue

                setup = seiscomp.datamodel.findSetup(cfg, self.name(), True)
                if not setup:
                    seiscomp.logging.warning(
                        'could not find station setup for %s.%s' % (net, sta))
                    continue

                params = seiscomp.datamodel.ParameterSet.Find(setup.parameterSetID())
                if not params:
                    seiscomp.logging.warning(
                        'could not find station parameters for %s.%s' % (net, sta))
                    continue

                # detecLocid may legitimately be empty; detecStream is
                # mandatory.
                detecLocid = ''
                detecStream = None

                for j in range(params.parameterCount()):
                    param = params.parameter(j)
                    if param.name() == 'detecStream':
                        detecStream = param.value()
                    elif param.name() == 'detecLocid':
                        detecLocid = param.value()

                if detecStream is None:
                    seiscomp.logging.warning(
                        'could not find detecStream for %s.%s' % (net, sta))
                    continue

                loc = inv.getSensorLocation(net, sta, detecLocid, now)
                if loc is None:
                    seiscomp.logging.warning(
                        'could not find preferred location for %s.%s' % (net, sta))
                    continue

                components = {}
                tc = seiscomp.datamodel.ThreeComponents()
                # detecStream[:2] is the band+instrument code; the three
                # component channels are resolved from the inventory.
                seiscomp.datamodel.getThreeComponents(tc, loc, detecStream[:2], now)
                if tc.vertical():
                    cha = tc.vertical()
                    wfsID = seiscomp.datamodel.WaveformStreamID(net, sta, loc.code(),
                                                                cha.code(), '')
                    # Keyed by the last channel-code letter (Z/N/E/1/2/3).
                    components[cha.code()[-1]] = wfsID
                    seiscomp.logging.debug('add stream %s (vertical)' % wfs2Str(wfsID))
                if tc.firstHorizontal():
                    cha = tc.firstHorizontal()
                    wfsID = seiscomp.datamodel.WaveformStreamID(net, sta, loc.code(),
                                                                cha.code(), '')
                    components[cha.code()[-1]] = wfsID
                    seiscomp.logging.debug('add stream %s (first horizontal)' % wfs2Str(wfsID))
                if tc.secondHorizontal():
                    cha = tc.secondHorizontal()
                    wfsID = seiscomp.datamodel.WaveformStreamID(net, sta, loc.code(),
                                                                cha.code(), '')
                    components[cha.code()[-1]] = wfsID
                    seiscomp.logging.debug('add stream %s (second horizontal)' % wfs2Str(wfsID))
                if len(components) > 0:
                    self.streams[sta] = components

            return

        # fallback loading streams from inventory
        seiscomp.logging.warning(
            'no configuration module available, loading streams '
            'from inventory and selecting first available stream '
            'matching epoch')
        for iNet in range(inv.inventory().networkCount()):
            net = inv.inventory().network(iNet)
            seiscomp.logging.debug('network %s: loaded %i stations' % (net.code(), net.stationCount()))
            for iSta in range(net.stationCount()):
                sta = net.station(iSta)
                # Skip stations whose epoch does not cover 'now'; an unset
                # start raises (ValueException) and skips the station, an
                # unset end means the epoch is still open.
                try:
                    start = sta.start()
                    if not start <= now:
                        continue
                except:
                    continue

                try:
                    end = sta.end()
                    if not now <= end:
                        continue
                except:
                    pass

                for iLoc in range(sta.sensorLocationCount()):
                    loc = sta.sensorLocation(iLoc)
                    for iCha in range(loc.streamCount()):
                        cha = loc.stream(iCha)

                        wfsID = seiscomp.datamodel.WaveformStreamID(net.code(),
                                                                    sta.code(), loc.code(), cha.code(), '')
                        comp = cha.code()[2]
                        if sta.code() not in self.streams:
                            components = {}
                            components[comp] = wfsID
                            self.streams[sta.code()] = components
                        else:
                            # Seismic Handler does not support network,
                            # location and channel code: make sure network and
                            # location codes match first item in station
                            # specific steam list
                            oldWfsID = list(self.streams[sta.code()].values())[0]
                            if net.code() != oldWfsID.networkCode() or \
                               loc.code() != oldWfsID.locationCode() or \
                               cha.code()[:2] != oldWfsID.channelCode()[:2]:
                                seiscomp.logging.warning(
                                    'ambiguous stream id found for station %s, ignoring %s'
                                    % (sta.code(), wfs2Str(wfsID)))
                                continue

                            self.streams[sta.code()][comp] = wfsID

                        seiscomp.logging.debug('add stream %s' % wfs2Str(wfsID))
+
+ ###########################################################################
+ def parseTime(self, timeStr):
+ time = seiscomp.core.Time()
+ for fmt in TimeFormats:
+ if time.fromString(timeStr, fmt):
+ break
+ return time
+
+ ###########################################################################
+ def parseMagType(self, value):
+ if value == 'm':
+ return 'M'
+ elif value == 'ml':
+ return 'ML'
+ elif value == 'mb':
+ return 'mb'
+ elif value == 'ms':
+ return 'Ms(BB)'
+ elif value == 'mw':
+ return 'Mw'
+ elif value == 'bb':
+ return 'mB'
+
+ return ''
+
+ ###########################################################################
+ def sh2proc(self, file):
+ ep = seiscomp.datamodel.EventParameters()
+ origin = seiscomp.datamodel.Origin.Create()
+ event = seiscomp.datamodel.Event.Create()
+
+ origin.setCreationInfo(seiscomp.datamodel.CreationInfo())
+ origin.creationInfo().setCreationTime(seiscomp.core.Time.GMT())
+
+ originQuality = None
+ originCE = None
+ latFound = False
+ lonFound = False
+ depthError = None
+ originComments = {}
+
+ # variables, reset after 'end of phase'
+ pick = None
+ stationMag = None
+ staCode = None
+ compCode = None
+ stationMagBB = None
+
+ amplitudeDisp = None
+ amplitudeVel = None
+ amplitudeSNR = None
+ amplitudeBB = None
+
+ magnitudeMB = None
+ magnitudeML = None
+ magnitudeMS = None
+ magnitudeBB = None
+
+ km2degFac = 1.0 / seiscomp.math.deg2km(1.0)
+
+ # read file line by line, split key and value at colon
+ iLine = 0
+ for line in file:
+ iLine += 1
+ a = line.split(':', 1)
+ key = a[0].strip()
+ keyLower = key.lower()
+ value = None
+
+ # empty line
+ if len(keyLower) == 0:
+ continue
+
+ # end of phase
+ elif keyLower == '--- end of phase ---':
+ if pick is None:
+ seiscomp.logging.warning(
+ 'Line %i: found empty phase block' % iLine)
+ continue
+
+ if staCode is None or compCode is None:
+ seiscomp.logging.warning(
+ 'Line %i: end of phase, stream code incomplete' % iLine)
+ continue
+
+ if not staCode in self.streams:
+ seiscomp.logging.warning(
+ 'Line %i: end of phase, station code %s not found in inventory' % (iLine, staCode))
+ continue
+
+ if not compCode in self.streams[staCode]:
+ seiscomp.logging.warning(
+ 'Line %i: end of phase, component %s of station %s not found in inventory' % (iLine, compCode, staCode))
+ continue
+
+ streamID = self.streams[staCode][compCode]
+
+ pick.setWaveformID(streamID)
+ ep.add(pick)
+
+ arrival.setPickID(pick.publicID())
+ arrival.setPhase(phase)
+ origin.add(arrival)
+
+ if amplitudeSNR is not None:
+ amplitudeSNR.setPickID(pick.publicID())
+ amplitudeSNR.setWaveformID(streamID)
+ ep.add(amplitudeSNR)
+
+ if amplitudeBB is not None:
+ amplitudeBB.setPickID(pick.publicID())
+ amplitudeBB.setWaveformID(streamID)
+ ep.add(amplitudeBB)
+
+ if stationMagBB is not None:
+ stationMagBB.setWaveformID(streamID)
+ origin.add(stationMagBB)
+ stationMagContrib = seiscomp.datamodel.StationMagnitudeContribution()
+ stationMagContrib.setStationMagnitudeID(
+ stationMagBB.publicID())
+ if magnitudeBB is None:
+ magnitudeBB = seiscomp.datamodel.Magnitude.Create()
+ magnitudeBB.add(stationMagContrib)
+
+ if stationMag is not None:
+ if stationMag.type() in ['mb', 'ML'] and amplitudeDisp is not None:
+ amplitudeDisp.setPickID(pick.publicID())
+ amplitudeDisp.setWaveformID(streamID)
+ amplitudeDisp.setPeriod(
+ seiscomp.datamodel.RealQuantity(ampPeriod))
+ amplitudeDisp.setType(stationMag.type())
+ ep.add(amplitudeDisp)
+
+ if stationMag.type() in ['Ms(BB)'] and amplitudeVel is not None:
+ amplitudeVel.setPickID(pick.publicID())
+ amplitudeVel.setWaveformID(streamID)
+ amplitudeVel.setPeriod(
+ seiscomp.datamodel.RealQuantity(ampPeriod))
+ amplitudeVel.setType(stationMag.type())
+ ep.add(amplitudeVel)
+
+ stationMag.setWaveformID(streamID)
+ origin.add(stationMag)
+
+ stationMagContrib = seiscomp.datamodel.StationMagnitudeContribution()
+ stationMagContrib.setStationMagnitudeID(
+ stationMag.publicID())
+
+ magType = stationMag.type()
+ if magType == 'ML':
+ if magnitudeML is None:
+ magnitudeML = seiscomp.datamodel.Magnitude.Create()
+ magnitudeML.add(stationMagContrib)
+
+ elif magType == 'Ms(BB)':
+ if magnitudeMS is None:
+ magnitudeMS = seiscomp.datamodel.Magnitude.Create()
+ magnitudeMS.add(stationMagContrib)
+
+ elif magType == 'mb':
+ if magnitudeMB is None:
+ magnitudeMB = seiscomp.datamodel.Magnitude.Create()
+ magnitudeMB.add(stationMagContrib)
+
+ pick = None
+ staCode = None
+ compCode = None
+ stationMag = None
+ stationMagBB = None
+ amplitudeDisp = None
+ amplitudeVel = None
+ amplitudeSNR = None
+ amplitudeBB = None
+ continue
+
+ # empty key
+ elif len(a) == 1:
+ seiscomp.logging.warning('Line %i: key without value' % iLine)
+ continue
+
+ value = a[1].strip()
+ if pick is None:
+ pick = seiscomp.datamodel.Pick.Create()
+ arrival = seiscomp.datamodel.Arrival()
+
+ try:
+ ##############################################################
+ # station parameters
+
+ # station code
+ if keyLower == 'station code':
+ staCode = value
+
+ # pick time
+ elif keyLower == 'onset time':
+ pick.setTime(seiscomp.datamodel.TimeQuantity(self.parseTime(value)))
+
+ # pick onset type
+ elif keyLower == 'onset type':
+ found = False
+ for onset in [seiscomp.datamodel.EMERGENT, seiscomp.datamodel.IMPULSIVE,
+ seiscomp.datamodel.QUESTIONABLE]:
+ if value == seiscomp.datamodel.EPickOnsetNames_name(onset):
+ pick.setOnset(onset)
+ found = True
+ break
+ if not found:
+ raise Exception('Unsupported onset value')
+
+ # phase code
+ elif keyLower == 'phase name':
+ phase = seiscomp.datamodel.Phase()
+ phase.setCode(value)
+ pick.setPhaseHint(phase)
+
+ # event type
+ elif keyLower == 'event type':
+ evttype = EventTypes[value]
+ event.setType(evttype)
+ originComments[key] = value
+
+ # filter ID
+ elif keyLower == 'applied filter':
+ pick.setFilterID(value)
+
+ # channel code, prepended by configured Channel prefix if only
+ # one character is found
+ elif keyLower == 'component':
+ compCode = value
+
+ # pick evaluation mode
+ elif keyLower == 'pick type':
+ found = False
+ for mode in [seiscomp.datamodel.AUTOMATIC, seiscomp.datamodel.MANUAL]:
+ if value == seiscomp.datamodel.EEvaluationModeNames_name(mode):
+ pick.setEvaluationMode(mode)
+ found = True
+ break
+ if not found:
+ raise Exception('Unsupported evaluation mode value')
+
+ # pick author
+ elif keyLower == 'analyst':
+ creationInfo = seiscomp.datamodel.CreationInfo()
+ creationInfo.setAuthor(value)
+ pick.setCreationInfo(creationInfo)
+
+ # pick polarity
+ # isn't tested
+ elif keyLower == 'sign':
+ if value == 'positive':
+ sign = '0' # positive
+ elif value == 'negative':
+ sign = '1' # negative
+ else:
+ sign = '2' # unknown
+ pick.setPolarity(float(sign))
+
+ # arrival weight
+ elif keyLower == 'weight':
+ arrival.setWeight(float(value))
+
+ # arrival azimuth
+ elif keyLower == 'theo. azimuth (deg)':
+ arrival.setAzimuth(float(value))
+
+ # pick theo backazimuth
+ elif keyLower == 'theo. backazimuth (deg)':
+ if pick.slownessMethodID() == 'corrected':
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+ else:
+ pick.setBackazimuth(
+ seiscomp.datamodel.RealQuantity(float(value)))
+ pick.setSlownessMethodID('theoretical')
+
+ # pick beam slowness
+ elif keyLower == 'beam-slowness (sec/deg)':
+ if pick.slownessMethodID() == 'corrected':
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+ else:
+ pick.setHorizontalSlowness(
+ seiscomp.datamodel.RealQuantity(float(value)))
+ pick.setSlownessMethodID('Array Beam')
+
+ # pick beam backazimuth
+ elif keyLower == 'beam-azimuth (deg)':
+ if pick.slownessMethodID() == 'corrected':
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+ else:
+ pick.setBackazimuth(
+ seiscomp.datamodel.RealQuantity(float(value)))
+
+ # pick epi slowness
+ elif keyLower == 'epi-slowness (sec/deg)':
+ pick.setHorizontalSlowness(
+ seiscomp.datamodel.RealQuantity(float(value)))
+ pick.setSlownessMethodID('corrected')
+
+ # pick epi backazimuth
+ elif keyLower == 'epi-azimuth (deg)':
+ pick.setBackazimuth(seiscomp.datamodel.RealQuantity(float(value)))
+
+ # arrival distance degree
+ elif keyLower == 'distance (deg)':
+ arrival.setDistance(float(value))
+
+ # arrival distance km, recalculates for degree
+ elif keyLower == 'distance (km)':
+ if isinstance(arrival.distance(), float):
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine-1, 'distance (deg)'))
+ arrival.setDistance(float(value) * km2degFac)
+
+ # arrival time residual
+ elif keyLower == 'residual time':
+ arrival.setTimeResidual(float(value))
+
+ # amplitude snr
+ elif keyLower == 'signal/noise':
+ amplitudeSNR = seiscomp.datamodel.Amplitude.Create()
+ amplitudeSNR.setType('SNR')
+ amplitudeSNR.setAmplitude(
+ seiscomp.datamodel.RealQuantity(float(value)))
+
+ # amplitude period
+ elif keyLower.startswith('period'):
+ ampPeriod = float(value)
+
+ # amplitude value for displacement
+ elif keyLower == 'amplitude (nm)':
+ amplitudeDisp = seiscomp.datamodel.Amplitude.Create()
+ amplitudeDisp.setAmplitude(
+ seiscomp.datamodel.RealQuantity(float(value)))
+ amplitudeDisp.setUnit('nm')
+
+ # amplitude value for velocity
+ elif keyLower.startswith('vel. amplitude'):
+ amplitudeVel = seiscomp.datamodel.Amplitude.Create()
+ amplitudeVel.setAmplitude(
+ seiscomp.datamodel.RealQuantity(float(value)))
+ amplitudeVel.setUnit('nm/s')
+
+ elif keyLower == 'bb amplitude (nm/sec)':
+ amplitudeBB = seiscomp.datamodel.Amplitude.Create()
+ amplitudeBB.setAmplitude(
+ seiscomp.datamodel.RealQuantity(float(value)))
+ amplitudeBB.setType('mB')
+ amplitudeBB.setUnit('nm/s')
+ amplitudeBB.setPeriod(seiscomp.datamodel.RealQuantity(ampBBPeriod))
+
+ elif keyLower == 'bb period (sec)':
+ ampBBPeriod = float(value)
+
+ elif keyLower == 'broadband magnitude':
+ magType = self.parseMagType('bb')
+ stationMagBB = seiscomp.datamodel.StationMagnitude.Create()
+ stationMagBB.setMagnitude(
+ seiscomp.datamodel.RealQuantity(float(value)))
+ stationMagBB.setType(magType)
+ stationMagBB.setAmplitudeID(amplitudeBB.publicID())
+
+ # ignored
+ elif keyLower == 'quality number':
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+
+ # station magnitude value and type
+ elif keyLower.startswith('magnitude '):
+ magType = self.parseMagType(key[10:])
+ stationMag = seiscomp.datamodel.StationMagnitude.Create()
+ stationMag.setMagnitude(
+ seiscomp.datamodel.RealQuantity(float(value)))
+
+ if len(magType) > 0:
+ stationMag.setType(magType)
+ if magType == 'mb':
+ stationMag.setAmplitudeID(amplitudeDisp.publicID())
+
+ elif magType == 'MS(BB)':
+ stationMag.setAmplitudeID(amplitudeVel.publicID())
+ else:
+ seiscomp.logging.debug('Line %i: Magnitude Type not known %s.' % (iLine, magType))
+
+ ###############################################################
+ # origin parameters
+
+ # event ID, added as origin comment later on
+ elif keyLower == 'event id':
+ originComments[key] = value
+
+ # magnitude value and type
+ elif keyLower == 'mean bb magnitude':
+ magType = self.parseMagType('bb')
+ if magnitudeBB is None:
+ magnitudeBB = seiscomp.datamodel.Magnitude.Create()
+ magnitudeBB.setMagnitude(
+ seiscomp.datamodel.RealQuantity(float(value)))
+ magnitudeBB.setType(magType)
+
+ elif keyLower.startswith('mean magnitude '):
+ magType = self.parseMagType(key[15:])
+
+ if magType == 'ML':
+ if magnitudeML is None:
+ magnitudeML = seiscomp.datamodel.Magnitude.Create()
+ magnitudeML.setMagnitude(
+ seiscomp.datamodel.RealQuantity(float(value)))
+ magnitudeML.setType(magType)
+
+ elif magType == 'Ms(BB)':
+ if magnitudeMS is None:
+ magnitudeMS = seiscomp.datamodel.Magnitude.Create()
+ magnitudeMS.setMagnitude(
+ seiscomp.datamodel.RealQuantity(float(value)))
+ magnitudeMS.setType(magType)
+
+ elif magType == 'mb':
+ if magnitudeMB is None:
+ magnitudeMB = seiscomp.datamodel.Magnitude.Create()
+ magnitudeMB.setMagnitude(
+ seiscomp.datamodel.RealQuantity(float(value)))
+ magnitudeMB.setType(magType)
+
+ else:
+ seiscomp.logging.warning('Line %i: Magnitude type %s not defined yet.' % (iLine, magType))
+
+ # latitude
+ elif keyLower == 'latitude':
+ origin.latitude().setValue(float(value))
+ latFound = True
+ elif keyLower == 'error in latitude (km)':
+ origin.latitude().setUncertainty(float(value))
+
+ # longitude
+ elif keyLower == 'longitude':
+ origin.longitude().setValue(float(value))
+ lonFound = True
+ elif keyLower == 'error in longitude (km)':
+ origin.longitude().setUncertainty(float(value))
+
+ # depth
+ elif keyLower == 'depth (km)':
+ origin.setDepth(seiscomp.datamodel.RealQuantity(float(value)))
+ if depthError is not None:
+ origin.depth().setUncertainty(depthError)
+ elif keyLower == 'depth type':
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+ elif keyLower == 'error in depth (km)':
+ depthError = float(value)
+ try:
+ origin.depth().setUncertainty(depthError)
+ except seiscomp.core.ValueException:
+ pass
+
+ # time
+ elif keyLower == 'origin time':
+ origin.time().setValue(self.parseTime(value))
+ elif keyLower == 'error in origin time':
+ origin.time().setUncertainty(float(value))
+
+ # location method
+ elif keyLower == 'location method':
+ origin.setMethodID(str(value))
+
+ # region table, added as origin comment later on
+ elif keyLower == 'region table':
+ originComments[key] = value
+
+ # region table, added as origin comment later on
+ elif keyLower == 'region id':
+ originComments[key] = value
+
+ # source region, added as origin comment later on
+ elif keyLower == 'source region':
+ originComments[key] = value
+
+ # used station count
+ elif keyLower == 'no. of stations used':
+ if originQuality is None:
+ originQuality = seiscomp.datamodel.OriginQuality()
+ originQuality.setUsedStationCount(int(value))
+
+ # ignored
+ elif keyLower == 'reference location name':
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+
+ # confidence ellipsoid major axis
+ elif keyLower == 'error ellipse major':
+ if originCE is None:
+ originCE = seiscomp.datamodel.ConfidenceEllipsoid()
+ originCE.setSemiMajorAxisLength(float(value))
+
+ # confidence ellipsoid minor axis
+ elif keyLower == 'error ellipse minor':
+ if originCE is None:
+ originCE = seiscomp.datamodel.ConfidenceEllipsoid()
+ originCE.setSemiMinorAxisLength(float(value))
+
+ # confidence ellipsoid rotation
+ elif keyLower == 'error ellipse strike':
+ if originCE is None:
+ originCE = seiscomp.datamodel.ConfidenceEllipsoid()
+ originCE.setMajorAxisRotation(float(value))
+
+ # azimuthal gap
+ elif keyLower == 'max azimuthal gap (deg)':
+ if originQuality is None:
+ originQuality = seiscomp.datamodel.OriginQuality()
+ originQuality.setAzimuthalGap(float(value))
+
+ # creation info author
+ elif keyLower == 'author':
+ origin.creationInfo().setAuthor(value)
+
+ # creation info agency
+ elif keyLower == 'source of information':
+ origin.creationInfo().setAgencyID(value)
+
+ # earth model id
+ elif keyLower == 'velocity model':
+ origin.setEarthModelID(value)
+
+ # standard error
+ elif keyLower == 'rms of residuals (sec)':
+ if originQuality is None:
+ originQuality = seiscomp.datamodel.OriginQuality()
+ originQuality.setStandardError(float(value))
+
+ # ignored
+ elif keyLower == 'phase flags':
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+
+ # ignored
+ elif keyLower == 'location input params':
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+
+ # missing keys
+ elif keyLower == 'ampl&period source':
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+
+ elif keyLower == 'location quality':
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+
+ elif keyLower == 'reference latitude':
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+
+ elif keyLower == 'reference longitude':
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+
+ elif keyLower.startswith('amplitude time'):
+ seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
+
+ # unknown key
+ else:
+ seiscomp.logging.warning('Line %i: ignoring unknown parameter: %s' % (iLine, key))
+
+ except ValueError as ve:
+ seiscomp.logging.warning('Line %i: can not parse %s value' % (iLine, key))
+ except Exception:
+ seiscomp.logging.error('Line %i: %s' % (iLine, str(traceback.format_exc())))
+ return None
+
+ # check
+ if not latFound:
+ seiscomp.logging.warning('could not add origin, missing latitude parameter')
+ elif not lonFound:
+ seiscomp.logging.warning('could not add origin, missing longitude parameter')
+ elif not origin.time().value().valid():
+ seiscomp.logging.warning('could not add origin, missing origin time parameter')
+ else:
+ if magnitudeMB is not None:
+ origin.add(magnitudeMB)
+ if magnitudeML is not None:
+ origin.add(magnitudeML)
+ if magnitudeMS is not None:
+ origin.add(magnitudeMS)
+ if magnitudeBB is not None:
+ origin.add(magnitudeBB)
+
+ ep.add(event)
+ ep.add(origin)
+
+ if originQuality is not None:
+ origin.setQuality(originQuality)
+
+ if originCE is not None:
+ uncertainty = seiscomp.datamodel.OriginUncertainty()
+ uncertainty.setConfidenceEllipsoid(originCE)
+ origin.setUncertainty(uncertainty)
+
+ for k, v in originComments.items():
+ comment = seiscomp.datamodel.Comment()
+ comment.setId(k)
+ comment.setText(v)
+ origin.add(comment)
+
+ return ep
+
+ ###########################################################################
+ def run(self):
+ self.loadStreams()
+
+ try:
+ if self.inputFile == '-':
+ f = sys.stdin
+ else:
+ f = open(self.inputFile)
+ except IOError as e:
+ seiscomp.logging.error(str(e))
+ return False
+
+ ep = self.sh2proc(f)
+ if ep is None:
+ return False
+
+ ar = seiscomp.io.XMLArchive()
+ ar.create('-')
+ ar.setFormattedOutput(True)
+ ar.writeObject(ep)
+ ar.close()
+
+ return True
+
+
+###############################################################################
def main():
    """Run the converter; returns a process exit code (0 on success)."""
    try:
        app = SH2Proc()
        return app()
    except Exception:
        # Print the stack trace ourselves. A bare 'except:' (as before)
        # would also have swallowed SystemExit and KeyboardInterrupt.
        sys.stderr.write(str(traceback.format_exc()))

    return 1


if __name__ == '__main__':
    sys.exit(main())
+
+
+# vim: ts=4 et
diff --git a/bin/slarchive b/bin/slarchive
new file mode 100755
index 0000000..f742847
Binary files /dev/null and b/bin/slarchive differ
diff --git a/bin/slinktool b/bin/slinktool
new file mode 100755
index 0000000..29fa619
Binary files /dev/null and b/bin/slinktool differ
diff --git a/bin/slmon b/bin/slmon
new file mode 100755
index 0000000..3ea75b2
--- /dev/null
+++ b/bin/slmon
@@ -0,0 +1,483 @@
+#!/usr/bin/env seiscomp-python
+
+from __future__ import print_function
+from getopt import getopt, GetoptError
+from time import time, gmtime
+from datetime import datetime
+import os, sys, signal, glob, re
+from seiscomp.myconfig import MyConfig
+import seiscomp.slclient
+import seiscomp.kernel, seiscomp.config
+
+usage_info = """
+Usage:
+ slmon [options]
+
+SeedLink monitor creating static web pages
+
+Options:
+ -h, --help display this help message
+ -c ini_setup = arg
+ -s ini_stations = arg
+ -t refresh = float(arg) # XXX not yet used
+ -v verbose = 1
+
+Examples:
+Start slmon from the command line
+ slmon -c $SEISCOMP_ROOT/var/lib/slmon/config.ini
+
+Restart slmon in order to update the web pages. Use crontab entries for
+automatic restart, e.g.:
+ */3 * * * * /home/sysop/seiscomp/bin/seiscomp check slmon >/dev/null 2>&1
+"""
+
def usage(exitcode=0):
    """Write the usage text to stderr and terminate with *exitcode*.

    Uses sys.exit() instead of the site-provided exit() builtin, which is
    meant for interactive use and is absent when Python runs with -S.
    """
    sys.stderr.write(usage_info)
    sys.exit(exitcode)
+
# SEISCOMP_ROOT is mandatory; only a missing variable should trigger the
# usage hint (the previous bare 'except:' also masked SystemExit etc.).
try:
    seiscompRoot = os.environ["SEISCOMP_ROOT"]
except KeyError:
    print("\nSEISCOMP_ROOT must be defined - EXIT\n", file=sys.stderr)
    usage(exitcode=2)

# Default station list and monitor configuration below var/lib/slmon.
ini_stations = os.path.join(seiscompRoot, 'var/lib/slmon/stations.ini')
ini_setup = os.path.join(seiscompRoot, 'var/lib/slmon/config.ini')

# Channels considered for monitoring: band, instrument, orientation codes.
regexStreams = re.compile("[SLBVEH][HNLG][ZNE123]")

verbose = 0
+
class Module(seiscomp.kernel.Module):
    # Kernel integration so 'seiscomp check/start slmon' can manage this
    # script like any other SeisComP module.
    def __init__(self, env):
        seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__))

    def printCrontab(self):
        # Suggested crontab entry: 'seiscomp check' restarts slmon so the
        # static pages are regenerated periodically.
        print("3 * * * * %s/bin/seiscomp check slmon >/dev/null 2>&1" % (self.env.SEISCOMP_ROOT))
+
class Status:
    # Plain record for one stream's latency state. The attributes
    # (net, sta, loc, cha, typ, last_data, last_feed) are assigned by the
    # producers (StatusDict.read / fromSlinkTool), not here.

    def __repr__(self):
        # Fixed-width line whose column layout matches the slices parsed
        # by StatusDict.read().
        return "%2s %-5s %2s %3s %1s %s %s" % \
            (self.net, self.sta, self.loc, self.cha, self.typ, \
             str(self.last_data), str(self.last_feed))
class StatusDict(dict):
    """Dictionary of Status records keyed by a stream identifier string."""

    def __init__(self, source=None):
        if source:
            self.read(source)

    def fromSlinkTool(self, server="", stations=["GE_MALT", "GE_MORC", "GE_IBBN"]):
        """Populate the dict by querying a SeedLink server via slinktool.

        Only data records (type 'D') of channels matching regexStreams and
        belonging to the requested NET_STA stations are kept.
        """
        # later this shall use XML
        cmd = "slinktool -nd 10 -nt 10 -Q %s" % server
        print(cmd)
        f = os.popen(cmd)
        regex = regexStreams
        for line in f:
            net_sta = line[:2].strip() + "_" + line[3:8].strip()
            if not net_sta in stations:
                continue
            typ = line[16]
            if typ != "D":
                continue
            cha = line[12:15].strip()
            if not regex.match(cha):
                continue

            d = Status()
            d.net = line[0:2].strip()
            d.sta = line[3:8].strip()
            d.loc = line[9:11].strip()
            d.cha = line[12:15]
            d.typ = line[16]
            d.last_data = seiscomp.slclient.timeparse(line[47:70])
            d.last_feed = d.last_data
            # NOTE(review): key format differs from the one used by
            # read(); the original code also contained a dead first
            # assignment ('NET_STA') that was immediately overwritten.
            sec = "%s.%s.%s.%s.%c" % (d.net, d.sta, d.loc, d.cha, d.typ)
            self[sec] = d

    def read(self, source):
        """Read status lines from a path, an open file or a list of lines.

        Raises TypeError for any other source type.
        """
        # The original used the Python-2-only 'file' builtin here, which
        # raises NameError under Python 3; use open()/duck typing instead.
        if isinstance(source, str):
            source = open(source)
        if hasattr(source, "readlines"):
            source = source.readlines()
        if not isinstance(source, list):
            raise TypeError('cannot read from %s' % str(type(source)))

        for line in source:
            d = Status()
            d.net = line[0:2]
            d.sta = line[3:8].strip()
            d.loc = line[9:11].strip()
            d.cha = line[12:15]
            d.typ = line[16]
            d.last_data = seiscomp.slclient.timeparse(line[18:41])
            d.last_feed = seiscomp.slclient.timeparse(line[42:65])
            if d.last_feed < d.last_data:
                d.last_feed = d.last_data
            sec = "%s_%s:%s.%s.%c" % (d.net, d.sta, d.loc, d.cha, d.typ)
            self[sec] = d

    def write(self, f):
        """Write all entries, sorted, to a path or an open file *f*."""
        # Same Python-3 fix as in read(): 'file(f, "w")' -> open().
        if isinstance(f, str):
            f = open(f, "w")
        lines = []
        for key in list(self.keys()):
            lines.append(str(self[key]))
        lines.sort()
        f.write('\n'.join(lines) + '\n')
+
+
+def colorLegend(htmlfile):
+ htmlfile.write("
Latencies: \n" \
+ "
\n
\n" \
+ "
<30 m
\n" \
+ "
< 1 h
\n" \
+ "
< 2 h
\n" \
+ "
< 6 h
\n" \
+ "
< 1 d
\n" \
+ "
< 2 d
\n" \
+ "
< 3 d
\n" \
+ "
< 4 d
\n" \
+ "
< 5 d
\n" \
+ "
> 5 d
\n" \
+ "
\n
\n
\n")
+
# encodes an email address so that it cannot (easily) be extracted
# from the web page. This is meant to be a spam protection.
def encode(txt):
    """Return *txt* as a run of decimal HTML character references,
    e.g. 'a' -> '&#97;'.

    The '&#' prefix is required for browsers to render the characters;
    without it the page would show bare digit runs instead of the text.
    """
    return ''.join(["&#%d;" % ord(c) for c in txt])
+
+def total_seconds(td): return td.seconds + (td.days*86400)
+
+def pageTrailer(htmlfile, config):
+
+ htmlfile.write("\n" \
+ "