[installation] Initial commit with first config
commit
906e9ccf6b
@ -0,0 +1,5 @@
|
||||
var
|
||||
*.pyc
|
||||
HYPO*
|
||||
RESET*
|
||||
share/maps
|
Binary file not shown.
@ -0,0 +1,82 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import seiscomp.datamodel
|
||||
import seiscomp.io
|
||||
import getopt
|
||||
import sys
|
||||
|
||||
|
||||
usage = """arclink2inv [options] input=stdin output=stdout
|
||||
|
||||
Options:
|
||||
-h [ --help ] Produce help message
|
||||
-f [ --formatted ] Enable formatted XML output
|
||||
"""
|
||||
|
||||
|
||||
def main(argv):
    """Convert an Arclink inventory file to SeisComP inventory XML.

    argv[1] is the input file and argv[2] the output file; "-" or a
    missing argument means stdin/stdout.  Returns 0 on success, 1 on
    any error.
    """
    importer = seiscomp.io.Importer.Create("arclink")
    if importer is None:
        sys.stderr.write("Arclink import not available\n")
        return 1

    formatted = False

    # Parse command line options.
    try:
        opts, args = getopt.getopt(argv[1:], "hf", ["help", "formatted"])
    except getopt.error as msg:
        sys.stderr.write("%s\n" % msg)
        sys.stderr.write("for help use --help\n")
        return 1

    for opt, _ in opts:
        if opt in ("-h", "--help"):
            sys.stderr.write("%s\n" % usage)
            return 1
        if opt in ("-f", "--formatted"):
            formatted = True

    argv = args

    # First positional argument: input file ("-" = stdin).
    source = argv[0] if len(argv) > 0 else "-"
    obj = importer.read(source)

    inv = seiscomp.datamodel.Inventory.Cast(obj)
    if inv is None:
        sys.stderr.write("No inventory found\n")
        return 1

    # Second positional argument: output file ("-" = stdout).
    archive = seiscomp.io.XMLArchive()
    target = argv[1] if len(argv) > 1 else "-"
    if not archive.create(target):
        sys.stderr.write("Failed to open output\n")
        return 1

    archive.setFormattedOutput(formatted)
    archive.writeObject(inv)
    archive.close()
    return 0


if __name__ == "__main__":
    sys.exit(main(sys.argv))
|
@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
############################################################################
|
||||
# Copyright (C) gempa GmbH #
|
||||
# All rights reserved. #
|
||||
# Contact: gempa GmbH (seiscomp-dev@gempa.de) #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
# #
|
||||
# Other Usage #
|
||||
# Alternatively, this file may be used in accordance with the terms and #
|
||||
# conditions contained in a signed written agreement between you and #
|
||||
# gempa GmbH. #
|
||||
############################################################################
|
||||
|
||||
# Thin launcher: delegate all work to the seiscomp.bindings2cfg module
# and propagate its return value as the process exit status.
import sys

import seiscomp.bindings2cfg

sys.exit(seiscomp.bindings2cfg.main())
|
Binary file not shown.
@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
from seiscomp import mseedlite as mseed
|
||||
|
||||
# Demultiplex a MiniSEED file: append every record to a per-channel file
# named STA.NET.LOC.CHA.D.YEAR.DOY.HHMM, where the timestamp comes from
# the first record seen for that channel.
open_files = {}

if len(sys.argv) != 2:
    print("Usage: extr_file FILE")
    sys.exit(1)

for rec in mseed.Input(open(sys.argv[1], "rb")):
    key = "%s.%s.%s.%s" % (rec.sta, rec.net, rec.loc, rec.cha)

    ofile = open_files.get(key)
    if ofile is None:
        begin = rec.begin_time
        # timetuple()[7] is the day of year (1-366).
        suffix = ".D.%04d.%03d.%02d%02d" % (
            begin.year, begin.timetuple()[7], begin.hour, begin.minute)
        ofile = open(key + suffix, "ab")
        open_files[key] = ofile

    ofile.write(rec.header + rec.data)

for ofile in open_files.values():
    ofile.close()
|
||||
|
File diff suppressed because it is too large
Load Diff
Binary file not shown.
@ -0,0 +1,134 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import sys
|
||||
import os
|
||||
import subprocess
|
||||
import glob
|
||||
import seiscomp.client
|
||||
|
||||
|
||||
class Importer(seiscomp.client.Application):
    """Front-end that dispatches inventory conversion to a format-specific
    converter program named <format>2inv located in $SEISCOMP_ROOT/bin."""

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        # Stand-alone tool: neither messaging nor a database is needed.
        self.setMessagingEnabled(False)
        self.setDatabaseEnabled(False, False)

        self._args = argv[1:]

    def run(self):
        """Parse positional arguments and execute the converter.

        Returns True on success, False on any usage or conversion error.
        """
        if len(self._args) == 0:
            sys.stderr.write(
                "Usage: import_inv [{format}|help] <input> [output]\n")
            return False

        if self._args[0] == "help":
            if len(self._args) < 2:
                sys.stderr.write("'help' can only be used with 'formats'\n")
                sys.stderr.write("import_inv help formats\n")
                return False

            if self._args[1] == "formats":
                return self.printFormats()

            sys.stderr.write("unknown topic '%s'\n" % self._args[1])
            return False

        fmt = self._args[0]
        try:
            # FIX: catch KeyError specifically instead of a bare except.
            prog = os.path.join(
                os.environ['SEISCOMP_ROOT'], "bin", "%s2inv" % fmt)
        except KeyError:
            sys.stderr.write(
                "Could not get SeisComP root path, SEISCOMP_ROOT not set?\n")
            return False

        if not os.path.exists(prog):
            sys.stderr.write("Format '%s' is not supported\n" % fmt)
            return False

        if len(self._args) < 2:
            sys.stderr.write("Input missing\n")
            return False

        input = self._args[1]

        if len(self._args) < 3:
            # No output given: derive it from the input name and place it
            # in the default inventory directory.
            filename = os.path.basename(os.path.abspath(input))
            if not filename:
                filename = fmt

            # Append .xml if the ending is not already .xml
            if filename[-4:] != ".xml":
                filename = filename + ".xml"
            storage_dir = os.path.join(
                os.environ['SEISCOMP_ROOT'], "etc", "inventory")
            output = os.path.join(storage_dir, filename)
            # FIX: exist_ok replaces the former bare try/except pass,
            # which also hid real failures such as permission errors.
            os.makedirs(storage_dir, exist_ok=True)
            sys.stderr.write("Generating output to %s\n" % output)
        else:
            output = self._args[2]

        proc = subprocess.Popen([prog, input, output],
                                stdout=None, stderr=None, shell=False)
        proc.communicate()  # wait for the converter to finish
        if proc.returncode != 0:
            sys.stderr.write(
                "Conversion failed, return code: %d\n" % proc.returncode)
            return False

        return True

    def printFormats(self):
        """List the supported formats by globbing for *2inv programs."""
        try:
            path = os.path.join(os.environ['SEISCOMP_ROOT'], "bin", "*2inv")
        except KeyError:
            sys.stderr.write(
                "Could not get SeisComP root path, SEISCOMP_ROOT not set?\n")
            return False

        for f in glob.glob(path):
            prog = os.path.basename(f)
            # Strip the trailing "2inv" to obtain the format name.
            sys.stdout.write("%s\n" % prog[:prog.find("2inv")])

        return True

    def printUsage(self):
        print('''Usage:
import_inv [FORMAT] input [output]
import_inv help [topic]

Import inventory information from various sources.''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
List all supported inventory formats
import_inv help formats

Convert from FDSN stationXML to SeisComp format
import_inv fdsnxml inventory_fdsnws.xml inventory_sc.xml
''')


if __name__ == "__main__":
    app = Importer(len(sys.argv), sys.argv)
    sys.exit(app())
|
@ -0,0 +1,278 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
|
||||
from __future__ import print_function
|
||||
import sys, os
|
||||
import csv
|
||||
from optparse import OptionParser
|
||||
|
||||
def quote(instr):
    """Return *instr* wrapped in double quotes."""
    return '"%s"' % instr
|
||||
|
||||
class base(object):
    """CSV attribute table keyed by the 'id' column.

    Each row becomes a dict of its non-empty fields; 'low_freq' and
    'high_freq' are converted to float when present.
    """

    def __init__(self, filename, fields):
        """Load *filename*; *fields* lists the expected column names.

        Raises Exception when a column is missing or a value cannot be
        parsed.
        """
        # id -> {field: value} for every row of the CSV file.
        self.att = {}
        # FIX (idiom): 'with' replaces the manual try/finally close.
        with open(filename) as fd:
            try:
                for row in csv.DictReader(fd):
                    id = row['id']
                    if id in self.att:
                        print("multiple %s found in %s" % (id, filename))
                        continue

                    # Drop empty cells so only meaningful attributes remain.
                    # A missing column raises KeyError, reported below.
                    for key in fields:
                        if not row[key]:
                            del row[key]

                    del row['id']

                    try:
                        row['low_freq'] = float(row['low_freq'])
                    except KeyError:
                        pass

                    try:
                        row['high_freq'] = float(row['high_freq'])
                    except KeyError:
                        pass

                    self.att[id] = row

            except KeyError as e:
                raise Exception("column %s missing in %s" % (str(e), filename))

            except (TypeError, ValueError) as e:
                raise Exception("error reading %s: %s" % (filename, str(e)))

    def keys(self):
        """Return the list of known ids."""
        return list(self.att.keys())

    def screname(self, what):
        """Convert snake_case to CamelCase; expand *Freq to *Frequency."""
        nc = ""
        nu = True
        for c in what:
            if c == '_':
                nu = True
                continue
            if nu:
                nc += c.upper()
                nu = False
            else:
                nc += c

        if nc == 'LowFreq':
            nc = 'LowFrequency'
        if nc == 'HighFreq':
            nc = 'HighFrequency'

        return nc

    def reorder(self):
        """Invert the table: CamelCase attribute name -> value -> [ids].

        Returns None when the table is empty.
        """
        if not self.att:
            return None

        att = {}
        for (code, row) in self.att.items():
            for (k, v) in row.items():
                k = self.screname(k)
                # FIX (idiom): setdefault replaces two bare try/except
                # lookups; behavior is identical.
                att.setdefault(k, {}).setdefault(str(v), []).append(code)
        return att

    def dump(self, fdo):
        """Write the attributes as 'Ia:' lines grouped by attribute name.

        NOTE: lastK is never updated after the first group, so a blank
        line precedes every group except the first (preserved behavior).
        """
        att = self.reorder()
        lastK = None

        for (k, v) in att.items():
            if not lastK:
                lastK = k
            if lastK != k:
                fdo.write("\n")
            for (kv, ids) in v.items():
                fdo.write("Ia: %s=%s" % (k, quote(kv)))
                for id in ids:
                    fdo.write(" %s" % id)
                fdo.write("\n")
            fdo.write("\n")
||||
|
||||
class sensorAttributes(base):
    """Attribute table for sensors (see base for the file format)."""

    _FIELDS = ['id', 'type', 'unit', 'low_freq', 'high_freq',
               'model', 'manufacturer', 'remark']

    def __init__(self, filename):
        base.__init__(self, filename, self._FIELDS)
|
||||
|
||||
class dataloggerAttributes(base):
    """Attribute table for dataloggers (see base for the file format)."""

    _FIELDS = ['id', 'digitizer_model', 'digitizer_manufacturer',
               'recorder_model', 'recorder_manufacturer',
               'clock_model', 'clock_manufacturer', 'clock_type', 'remark']

    def __init__(self, filename):
        base.__init__(self, filename, self._FIELDS)
|
||||
|
||||
class INST(object):
    """Old-style instrument tab file, parsed and re-emitted line by line.

    Every non-comment line is classified with a one-letter type code:
    C comment, F FIR filter, L single-gain calibration, P digital
    poles&zeros, I IIR poles&zeros, S sensor, D datalogger,
    U still unclassified.
    """

    def cleanID(self, id):
        # Return the substring of *id* after the last '_'
        # (the whole id when it contains no underscore).
        nc = ""
        for c in id:
            nc += c
            if c == '_':
                nc = ""

        return nc

    def __init__(self, filename, attS, attD):
        # filename: tab file to parse.
        # attS / attD: sensor and datalogger attribute CSV files.
        self.filename = filename
        self.sensorA = sensorAttributes(attS)
        self.dataloggerA = dataloggerAttributes(attD)
        lines = []
        f = open(filename)
        for line in f:
            line = line.strip()
            if not line or line[0] == '#':
                # Add comments line types
                lines.append({ 'content': line, 'type': 'C', 'id': None})
            else:
                # Data lines look like "<id> > <content>".
                (id, line) = line.split(">", 1)
                id = id.strip()
                line = line.strip()
                # Add undefined line types
                lines.append({ 'content': line, 'type': 'U', 'id': id})
        f.close()
        self.lines = lines
        self._filltypes()

    def _filltypes(self):
        # First pass: classify by well-known id substrings.
        for line in self.lines:
            if line['type'] != 'U': continue
            id = line['id']
            if id.find('_FIR_') != -1:
                line['type'] = 'F'
            elif id.find('Sngl-gain_') != -1:
                line['type'] = 'L'
                line['id'] = self.cleanID(id)
            elif id.find('_digipaz_') != -1:
                line['type'] = 'P'
            elif id.find('_iirpaz_') != -1:
                line['type'] = 'I'

        # Second pass: classify the rest via the attribute tables, or via
        # the hard-coded instrument name lists below.
        for line in self.lines:
            if line['type'] != 'U': continue
            id = self.cleanID(line['id'])

            if id in list(self.sensorA.keys()):
                line['type'] = 'S'
                line['id'] = id
            elif id in list(self.dataloggerA.keys()):
                line['type'] = 'D'
                line['id'] = id
            # Those we are forcing !
            elif id in ['OSIRIS-SC', 'Gaia', 'LE24', 'MALI', 'PSS', 'FDL', 'CMG-SAM', 'CMG-DCM', 'EDAS-24', 'SANIAC']:
                line['id'] = id
                line['type'] = 'D'
            elif id in ['Trillium-Compact', 'Reftek-151/120', 'BBVS-60', 'CMG-3ESP/60F', 'LE-1D/1', 'L4-3D/BW', 'S13', 'GS13', 'SH-1', 'MP', 'MARKL22', 'CM-3', 'CMG-6T', 'SM-6/BW']:
                line['id'] = id
                line['type'] = 'S'

        # Report anything still unclassified so the lists above can be
        # extended by hand.
        for line in self.lines:
            if line['type'] == 'U':
                print("'"+self.cleanID(line['id'])+"', ", end=' ')

    def dump(self, fdo):
        # Emit the converted file to *fdo*.  The sensor / datalogger
        # attribute blocks are written once, just before the first line
        # of the corresponding kind.
        sa = False
        da = False

        # Compute column widths so the Se:/Dl: ids line up.
        dataloggerFieldSize = 0
        sensorFieldSize = 0
        for line in self.lines:
            if line['type'] == 'C': continue
            if line['type'] == 'S':
                if len(line['id']) > sensorFieldSize:
                    sensorFieldSize = len(line['id'])
            if line['type'] == 'D':
                if len(line['id']) > dataloggerFieldSize:
                    dataloggerFieldSize = len(line['id'])

        # Negative width produces a left-aligned, padded field.
        seLine = "Se: %%%ss %%s\n" % (-1*(sensorFieldSize+1))
        dtLine = "Dl: %%%ss %%s\n" % (-1*(dataloggerFieldSize+1))
        for line in self.lines:
            if line['type'] == 'C':
                fdo.write(line['content'] + "\n")
                continue

            if line['type'] == 'S':
                if not sa:
                    self.sensorA.dump(fdo)
                    sa = True
                fdo.write(seLine % (line['id'], line['content']))
                continue

            if line['type'] == 'D':
                if not da:
                    self.dataloggerA.dump(fdo)
                    da = True
                fdo.write(dtLine % (line['id'], line['content']))
                continue

            if line['type'] == 'L':
                fdo.write("Cl: %s %s\n" % (line['id'], line['content']))
                continue

            if line['type'] == 'F':
                fdo.write("Ff: %s %s\n" % (line['id'], line['content']))
                continue

            if line['type'] == 'P':
                fdo.write("Pz: %s %s\n" % (line['id'], line['content']))
                continue

            if line['type'] == 'I':
                fdo.write("If: %s %s\n" % (line['id'], line['content']))
                continue
|
||||
|
||||
def main():
    """Convert an old tab file to the new tab format.

    Reads the input file named on the command line and writes the
    converted output to stdout, or to the optional second positional
    argument.  Returns 0 on success, 1 on command line errors.
    """
    parser = OptionParser(usage="Old tab to New tab converter",
                          version="1.0", add_help_option=True)

    parser.add_option("", "--sat", type="string",
                      help="Indicates the sensor attribute file to use",
                      dest="sat", default="sensor_attr.csv")
    parser.add_option("", "--dat", type="string",
                      help="Indicates the station attribute file to use",
                      dest="dat", default="datalogger_attr.csv")
    parser.add_option("-c", "--clean", action="store_true",
                      help="Remove the comments and blank lines",
                      dest="cleanFile", default=False)

    # Parsing & Error check
    (options, args) = parser.parse_args()
    errors = []

    if len(args) != 1:
        errors.append("need an Input filename")

    if not os.path.isfile(options.sat):
        errors.append("sensor attribute file '%s' not found." % options.sat)

    if not os.path.isfile(options.dat):
        errors.append("datalogger attribute file '%s' not found." % options.dat)

    if len(args) == 2 and os.path.isfile(args[1]):
        errors.append("output file already exists, will not overwrite.")

    if errors:
        print("Found error while processing the command line:", file=sys.stderr)
        for error in errors:
            print(" %s" % error, file=sys.stderr)
        return 1

    inputName = args[0]
    i = INST(inputName, options.sat, options.dat)

    # FIX: only close the stream we opened ourselves; the original
    # unconditionally called fdo.close(), closing sys.stdout when no
    # output file was given.
    if len(args) < 2:
        i.dump(sys.stdout)
    else:
        with open(args[1], "w") as fdo:
            i.dump(fdo)

    return 0


if __name__ == "__main__":
    # FIX: propagate main()'s return value as the exit status; the
    # original always exited 0, even after command line errors.
    sys.exit(main())
|
@ -0,0 +1,98 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
from __future__ import (absolute_import, division, print_function, unicode_literals)
|
||||
|
||||
import sys
|
||||
import io
|
||||
from seiscomp.legacy.fseed import *
|
||||
from seiscomp.legacy.db.seiscomp3 import sc3wrap
|
||||
from seiscomp.legacy.db.seiscomp3.inventory import Inventory
|
||||
import seiscomp.datamodel, seiscomp.io
|
||||
|
||||
ORGANIZATION = "EIDA"
|
||||
|
||||
|
||||
def iterinv(obj):
    """Yield the inner values of a two-level mapping of mappings."""
    for inner in obj.values():
        yield from inner.values()
|
||||
|
||||
|
||||
def main():
    """Convert SeisComP inventory XML to a SEED dataless volume.

    Usage: inv2dlsv [in_xml [out_dataless]]; "-" or a missing argument
    means stdin/stdout.  Returns 0 on success, 1 on usage errors; raises
    IOError/TypeError on unreadable or invalid input.
    """
    # NOTE: len(sys.argv) is always >= 1, so only the upper bound matters
    # (the original also tested "< 1", which could never be true).
    if len(sys.argv) > 3:
        sys.stderr.write("Usage inv2dlsv [in_xml [out_dataless]]\n")
        return 1

    inFile = sys.argv[1] if len(sys.argv) > 1 else "-"
    out = sys.argv[2] if len(sys.argv) > 2 else ""

    sc3wrap.dbQuery = None

    ar = seiscomp.io.XMLArchive()
    # FIX (idiom): truth test instead of "== False" comparison.
    if not ar.open(inFile):
        raise IOError(inFile + ": unable to open")

    obj = ar.readObject()
    if obj is None:
        raise TypeError(inFile + ": invalid format")

    sc3inv = seiscomp.datamodel.Inventory.Cast(obj)
    if sc3inv is None:
        raise TypeError(inFile + ": invalid format")

    inv = Inventory(sc3inv)
    inv.load_stations("*", "*", "*", "*")
    inv.load_instruments()

    vol = SEEDVolume(inv, ORGANIZATION, "", resp_dict=False)

    # Register every stream of the inventory with the SEED volume;
    # per-channel errors are reported but do not abort the conversion.
    for net in iterinv(inv.network):
        for sta in iterinv(net.station):
            for loc in iterinv(sta.sensorLocation):
                for strm in iterinv(loc.stream):
                    try:
                        vol.add_chan(net.code, sta.code, loc.code,
                                     strm.code, strm.start, strm.end)
                    except SEEDError as e:
                        sys.stderr.write("Error (%s,%s,%s,%s): %s\n" % (
                            net.code, sta.code, loc.code, strm.code, str(e)))

    if not out or out == "-":
        # Build the volume in memory, then copy it to binary stdout.
        output = io.BytesIO()
        vol.output(output)
        stdout = sys.stdout.buffer if hasattr(sys.stdout, "buffer") else sys.stdout
        stdout.write(output.getvalue())
        stdout.flush()
        output.close()
    else:
        # FIX (consistency): use the already-bound 'out' instead of
        # re-reading sys.argv[2].
        with open(out, "wb") as fd:
            vol.output(fd)

    return 0


if __name__ == "__main__":
    try:
        sys.exit(main())
    except Exception as e:
        sys.stderr.write("Error: %s" % str(e))
        sys.exit(1)
|
Binary file not shown.
Binary file not shown.
@ -0,0 +1,280 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import sys
|
||||
import os
|
||||
import time
|
||||
import datetime
|
||||
import calendar
|
||||
import stat
|
||||
|
||||
from getopt import getopt, GetoptError
|
||||
from seiscomp import mseedlite as mseed
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
def read_mseed_with_delays(delaydict, reciterable):
    """Yield (delay_time, record) tuples with artificial per-station delays.

    *delaydict* maps "NET.STA" keys to a delay in seconds; the special key
    "default" supplies the delay for stations not listed.  Records from
    *reciterable* are buffered in a heap keyed by their delayed end time and
    released once a newer record's end time has passed that delayed time;
    anything still buffered is drained at the end.  This makes real-time
    simulations more realistic when some stations lag behind others.
    """
    import heapq  # pylint: disable=C0415

    pending = []
    default_delay = delaydict.get('default', 0)

    for record in reciterable:
        end_ts = calendar.timegm(record.end_time.timetuple())
        station_key = "%s.%s" % (record.net, record.sta)
        release_ts = end_ts + delaydict.get(station_key, default_delay)
        heapq.heappush(pending, (release_ts, record))
        # Release the earliest buffered record once its (delayed) time has
        # been overtaken by the current record's end time.
        if pending[0][0] < end_ts:
            yield heapq.heappop(pending)

    while pending:
        yield heapq.heappop(pending)
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
def rt_simul(f, speed=1., jump=0., delaydict=None):
    """
    Iterator to simulate "real-time" MSeed input

    At startup, the first MSeed record is read. The following records are
    read in pseudo-real-time relative to the time of the first record,
    resulting in data flowing at realistic speed. This is useful e.g. for
    demonstrating real-time processing using real data of past events.

    The data in the input file may be multiplexed, but *must* be sorted by
    time, e.g. using 'mssort'.

    f: open binary file object; speed: playback speed factor;
    jump: minutes of data to skip at the start; delaydict: optional
    per-station delays (see read_mseed_with_delays).
    """
    rtime = time.time()   # wall-clock start of the playback
    etime = None          # data time of the first kept record
    skipping = True       # still inside the initial 'jump' window
    record_iterable = mseed.Input(f)
    if delaydict:
        # Wrap the record stream so artificial per-station delays apply.
        record_iterable = read_mseed_with_delays(delaydict, record_iterable)
    for rec in record_iterable:
        if delaydict:
            # The delay iterator yields (delay_time, record) tuples.
            rec_time = rec[0]
            rec = rec[1]
        else:
            rec_time = calendar.timegm(rec.end_time.timetuple())
        if etime is None:
            etime = rec_time

        if skipping:
            # Drop records within the first 'jump' minutes of data.
            if (rec_time - etime) / 60.0 < jump:
                continue

            etime = rec_time
            skipping = False

        # tmax: the data time that "now" corresponds to at this speed.
        tmax = etime + speed * (time.time() - rtime)
        ms = 1000000.0 * (rec.nsamp / rec.fsamp)
        last_sample_time = rec.begin_time + datetime.timedelta(microseconds=ms)
        last_sample_time = calendar.timegm(last_sample_time.timetuple())
        if last_sample_time > tmax:
            # Record is still "in the future": sleep until it is due.
            time.sleep((last_sample_time - tmax + 0.001) / speed)
        yield rec
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
def usage():
    """Print the command line help text to stdout."""
    print('''Usage:
msrtsimul [options] file

MiniSEED real time playback and simulation

msrtsimul reads sorted (and possibly multiplexed) MiniSEED files and writes
individual records in pseudo-real-time. This is useful e.g. for testing and
simulating data acquisition. Output is
$SEISCOMP_ROOT/var/run/seedlink/mseedfifo unless --seedlink or -c is used.


Options:
-c, --stdout write on standard output
-d, --delays add artificial delays
-s, --speed speed factor (float)
-j, --jump minutes to skip (float)
--test test mode
-m --mode choose between 'realtime' and 'historic'
--seedlink choose the seedlink module name. Useful if a seedlink
alias or non-standard names are used. Replaces 'seedlink'
in the standard mseedfifo path.
-v, --verbose verbose mode
-h, --help display this help message

Examples:
Play back miniSEED waveforms in real time with verbose output
msrtsimul -v miniSEED-file
''')
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
def main():
    """Parse the command line and replay a MiniSEED file into the
    SeedLink mseedfifo (or to stdout with -c).

    Returns 0 on success, 1 on errors; some fatal setup errors call
    sys.exit(1) directly instead of returning.
    """
    py2 = sys.version_info < (3,)

    # Use the binary stream on Python 3, the plain stream on Python 2.
    ifile = sys.stdin if py2 else sys.stdin.buffer
    verbosity = 0
    speed = 1.
    jump = 0.
    test = False
    seedlink = 'seedlink'
    mode = 'realtime'
    setSystemTime = False  # NOTE(review): never set or read afterwards

    try:
        opts, args = getopt(sys.argv[1:], "cd:s:j:vhm:",
                            ["stdout", "delays=", "speed=", "jump=", "test",
                             "verbose", "help", "mode=", "seedlink="])
    except GetoptError:
        usage()
        return 1

    out_channel = None
    delays = None

    for flag, arg in opts:
        if flag in ("-c", "--stdout"):
            out_channel = sys.stdout if py2 else sys.stdout.buffer
        elif flag in ("-d", "--delays"):
            delays = arg
        elif flag in ("-s", "--speed"):
            speed = float(arg)
        elif flag in ("-j", "--jump"):
            jump = float(arg)
        elif flag in ("-m", "--mode"):
            mode = arg
        elif flag == "--seedlink":
            seedlink = arg
        elif flag in ("-v", "--verbose"):
            verbosity += 1
        elif flag == "--test":
            test = True
        else:
            # -h/--help or anything unhandled: show help; exit code 0
            # only for an explicit help request.
            usage()
            if flag in ("-h", "--help"):
                return 0
            return 1

    if len(args) == 1:
        # Positional argument: input file; "-" keeps reading stdin.
        if args[0] != "-":
            try:
                ifile = open(args[0], "rb")
            except IOError as e:
                print("could not open input file '{}' for reading: {}" \
                      .format(args[0], e), file=sys.stderr)
                sys.exit(1)
    elif len(args) != 0:
        usage()
        return 1

    if out_channel is None:
        # Default output: the SeedLink mseedfifo below $SEISCOMP_ROOT.
        try:
            sc_root = os.environ["SEISCOMP_ROOT"]
        except KeyError:
            print("SEISCOMP_ROOT environment variable is not set", file=sys.stderr)
            sys.exit(1)

        mseed_fifo = os.path.join(sc_root, "var", "run", seedlink, "mseedfifo")
        if verbosity:
            print("output data to %s" % mseed_fifo, file=sys.stderr)

        if not os.path.exists(mseed_fifo):
            print("""\
ERROR: {} does not exist.
In order to push the records to SeedLink, it needs to run and must be configured for real-time playback.
""".format(mseed_fifo), file=sys.stderr)
            sys.exit(1)

        if not stat.S_ISFIFO(os.stat(mseed_fifo).st_mode):
            print("""\
ERROR: {} is not a named pipe
Check if SeedLink is running and configured for real-time playback.
""".format(mseed_fifo), file=sys.stderr)
            sys.exit(1)

        try:
            out_channel = open(mseed_fifo, "wb")
        except Exception as e:
            print(str(e), file=sys.stderr)
            sys.exit(1)

    try:
        delaydict = None
        if delays:
            # Delay file format: one "NET.STA: seconds" entry per line.
            # Parse errors are reported but playback continues.
            delaydict = dict()
            try:
                f = open(delays, 'r')
                for line in f:
                    content = line.split(':')
                    if len(content) != 2:
                        raise Exception("Could not parse a line in file %s: %s\n" % (delays, line))
                    delaydict[content[0].strip()] = float(content[1].strip())
            except Exception as e:
                print("Error reading delay file {}: {}".format(delays, e),
                      file=sys.stderr)

        inp = rt_simul(ifile, speed=speed, jump=jump, delaydict=delaydict)
        stime = time.time()

        time_diff = None
        print("Starting msrtsimul at {}".format(datetime.datetime.utcnow()), file=sys.stderr)
        for rec in inp:
            # Only standard 512-byte records are supported by the fifo.
            if rec.size != 512:
                print("Skipping record of {}.{}.{}.{} starting on {}: length != 512 Bytes: ".format(rec.net, rec.sta, rec.loc, rec.cha, str(rec.begin_time)), file=sys.stderr)
                continue
            if time_diff is None:
                # Offset between "now" and the end of the first record.
                ms = 1000000.0 * (rec.nsamp / rec.fsamp)
                time_diff = datetime.datetime.utcnow() - rec.begin_time - \
                    datetime.timedelta(microseconds=ms)
            if mode == 'realtime':
                # Shift historic timestamps so the playback looks live.
                rec.begin_time += time_diff

            if verbosity:
                print("%s_%s %7.2f %s %7.2f" % \
                      (rec.net, rec.sta, (time.time() - stime), str(rec.begin_time),
                       time.time() - calendar.timegm(rec.begin_time.timetuple())),
                      file=sys.stderr)

            if not test:
                rec.write(out_channel, 9)
                out_channel.flush()

    except KeyboardInterrupt:
        pass
    except Exception as e:
        print("Exception: {}".format(str(e)), file=sys.stderr)
        return 1

    return 0


#------------------------------------------------------------------------------
if __name__ == "__main__":
    sys.exit(main())
|
Binary file not shown.
@ -0,0 +1,217 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import time
|
||||
import sys
|
||||
import os
|
||||
import time
|
||||
import seiscomp.core, seiscomp.client, seiscomp.datamodel, seiscomp.logging
|
||||
from seiscomp.scbulletin import Bulletin, stationCount
|
||||
|
||||
|
||||
class ProcAlert(seiscomp.client.Application):
    """Listen for event/origin/magnitude messages and publish qualifying
    events by generating an autoloc1 bulletin and handing it to an
    external "ProcAlert" script.
    """

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        self.setMessagingEnabled(True)
        self.setDatabaseEnabled(True, True)

        # Apply and interpret notifier messages automatically so that
        # addObject()/updateObject() are invoked for incoming objects.
        self.setAutoApplyNotifierEnabled(True)
        self.setInterpretNotifierEnabled(True)

        self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)
        self.addMessagingSubscription("EVENT")
        self.addMessagingSubscription("LOCATION")
        self.addMessagingSubscription("MAGNITUDE")

        # Publishing constraints; overridable via config and command line.
        self.maxAgeDays = 1.
        self.minPickCount = 25

        # Script invoked with the bulletin file and the event ID.
        self.procAlertScript = ""

        # NOTE(review): the result is never used afterwards; kept because
        # constructing the EventParameters instance may have a global
        # registration side effect -- confirm before removing.
        ep = seiscomp.datamodel.EventParameters()

    def createCommandLineDescription(self):
        """Register the "Publishing" command-line option group."""
        try:
            self.commandline().addGroup("Publishing")
            self.commandline().addIntOption(
                "Publishing", "min-arr",
                "Minimum arrival count of a published origin", self.minPickCount)
            self.commandline().addDoubleOption(
                "Publishing", "max-age",
                "Maximum age in days of published origins", self.maxAgeDays)
            self.commandline().addStringOption(
                "Publishing", "procalert-script",
                "Specify the script to publish an event. The ProcAlert file and the event id are passed as parameter $1 and $2")
            self.commandline().addOption(
                "Publishing", "test",
                "Test mode, no messages are sent")
        except Exception:
            # Narrowed from a bare except: do not swallow SystemExit et al.
            seiscomp.logging.warning(
                "caught unexpected error %s" % sys.exc_info())

    def initConfiguration(self):
        """Read optional module configuration; missing parameters keep
        their defaults (reads are best-effort by design)."""
        if not seiscomp.client.Application.initConfiguration(self):
            return False

        try:
            self.procAlertScript = self.configGetString("scripts.procAlert")
        except Exception:
            pass

        try:
            self.minPickCount = self.configGetInt("minArrivals")
        except Exception:
            pass

        try:
            self.maxAgeDays = self.configGetDouble("maxAgeDays")
        except Exception:
            pass

        return True

    def init(self):
        """Apply command-line overrides and set up the bulletin generator
        and the origin cache."""
        if not seiscomp.client.Application.init(self):
            return False

        try:
            self.procAlertScript = self.commandline().optionString(
                "procalert-script")
        except Exception:
            pass

        try:
            self.minPickCount = self.commandline().optionInt("min-arr")
        except Exception:
            pass

        try:
            self.maxAgeDays = self.commandline().optionDouble("max-age")
        except Exception:
            pass

        self.bulletin = Bulletin(self.query(), "autoloc1")
        self.cache = seiscomp.datamodel.PublicObjectRingBuffer(
            self.query(), 100)

        if not self.procAlertScript:
            seiscomp.logging.warning("No procalert script given")
        else:
            seiscomp.logging.info(
                "Using procalert script: %s" % self.procAlertScript)

        return True

    def addObject(self, parentID, obj):
        """Cache newly received origins; route all other objects through
        updateObject()."""
        org = seiscomp.datamodel.Origin.Cast(obj)
        if org:
            self.cache.feed(org)
            seiscomp.logging.info("Received origin %s" % org.publicID())
            return

        self.updateObject(parentID, obj)

    def updateObject(self, parentID, obj):
        """Handle a (possibly updated) event: load its preferred origin,
        check publishing criteria and run the ProcAlert script."""
        try:
            evt = seiscomp.datamodel.Event.Cast(obj)
            if evt:
                orid = evt.preferredOriginID()

                org = self.cache.get(seiscomp.datamodel.Origin, orid)
                if not org:
                    seiscomp.logging.error("Unable to fetch origin %s" % orid)
                    return

                # Lazily load origin children from the database if absent.
                if org.arrivalCount() == 0:
                    self.query().loadArrivals(org)
                if org.stationMagnitudeCount() == 0:
                    self.query().loadStationMagnitudes(org)
                if org.magnitudeCount() == 0:
                    self.query().loadMagnitudes(org)

                if not self.originMeetsCriteria(org, evt):
                    seiscomp.logging.warning("Origin %s not published" % orid)
                    return

                txt = self.bulletin.printEvent(evt)

                for line in txt.split("\n"):
                    line = line.rstrip()
                    seiscomp.logging.info(line)
                seiscomp.logging.info("")

                if not self.commandline().hasOption("test"):
                    self.send_procalert(txt, evt.publicID())

                return

        except Exception:
            sys.stderr.write("%s\n" % sys.exc_info())

    def hasValidNetworkMagnitude(self, org, evt):
        """Return True if the event's preferred magnitude is one of the
        origin's network magnitudes."""
        nmag = org.magnitudeCount()
        for imag in range(nmag):
            mag = org.magnitude(imag)
            if mag.publicID() == evt.preferredMagnitudeID():
                return True
        return False

    def send_procalert(self, txt, evid):
        """Write the bulletin text to a temporary file and invoke the
        configured ProcAlert script with the file name and event ID."""
        if self.procAlertScript:
            tmp = "/tmp/yyy%s" % evid.replace("/", "_").replace(":", "-")
            # BUGFIX: the original used the Python 2 builtin file(), which
            # does not exist in Python 3; open() via a context manager also
            # guarantees the file is closed before the script reads it.
            with open(tmp, "w") as f:
                f.write("%s" % txt)

            os.system(self.procAlertScript + " " + tmp + " " + evid)

    def coordinates(self, org):
        """Return the (latitude, longitude, depth) triple of an origin."""
        return org.latitude().value(), org.longitude().value(), org.depth().value()

    def originMeetsCriteria(self, org, evt):
        """Decide whether an origin qualifies for publication.

        Rejects implausible deep European events, origins with too few
        picks, too-old origins and origins without a valid network
        magnitude; manual or confirmed origins are always published.
        """
        publish = True

        lat, lon, dep = self.coordinates(org)

        # Deep events inside the European region are considered suspicious.
        if 43 < lat < 70 and -10 < lon < 60 and dep > 200:
            seiscomp.logging.error("suspicious region/depth - ignored")
            publish = False

        if stationCount(org) < self.minPickCount:
            seiscomp.logging.error("too few picks - ignored")
            publish = False

        now = seiscomp.core.Time.GMT()
        if (now - org.time().value()).seconds() / 86400. > self.maxAgeDays:
            seiscomp.logging.error("origin too old - ignored")
            publish = False

        # evaluationMode/evaluationStatus raise when unset -- best effort.
        try:
            if org.evaluationMode() == seiscomp.datamodel.MANUAL:
                publish = True
        except Exception:
            pass

        try:
            if org.evaluationStatus() == seiscomp.datamodel.CONFIRMED:
                publish = True
        except Exception:
            pass

        if not self.hasValidNetworkMagnitude(org, evt):
            seiscomp.logging.error("no network magnitude - ignored")
            publish = False

        return publish
|
||||
|
||||
|
||||
# Instantiate the application and exit with its return code.
app = ProcAlert(len(sys.argv), sys.argv)
sys.exit(app())
|
@ -0,0 +1 @@
|
||||
scml2inv
|
@ -0,0 +1,717 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import subprocess
|
||||
import traceback
|
||||
import seiscomp.core, seiscomp.client, seiscomp.datamodel, seiscomp.math
|
||||
import seiscomp.logging, seiscomp.seismology, seiscomp.system
|
||||
|
||||
|
||||
class ObjectAlert(seiscomp.client.Application):
    """Execute user-configured scripts when picks, amplitudes, preliminary
    origins or events arrive or are updated (scalert).

    Scripts are launched asynchronously via subprocess; at most one
    instance per script kind runs at a time.
    """

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        self.setMessagingEnabled(True)
        self.setDatabaseEnabled(True, True)
        self.setLoadRegionsEnabled(True)
        self.setMessagingUsername("")
        self.setPrimaryMessagingGroup(
            seiscomp.client.Protocol.LISTENER_GROUP)
        self.addMessagingSubscription("EVENT")
        self.addMessagingSubscription("LOCATION")
        self.addMessagingSubscription("MAGNITUDE")

        # Apply and interpret notifiers so addObject()/updateObject() fire.
        self.setAutoApplyNotifierEnabled(True)
        self.setInterpretNotifierEnabled(True)

        self.setLoadCitiesEnabled(True)
        self.setLoadRegionsEnabled(True)

        # Defaults; overridable via configuration and command line.
        self._ampType = "snr"
        self._citiesMaxDist = 20
        self._citiesMinPopulation = 50000

        # Configured user scripts (None = not configured).
        self._eventDescriptionPattern = None
        self._pickScript = None
        self._ampScript = None
        self._alertScript = None
        self._eventScript = None

        # Popen handles of the currently running script processes.
        self._pickProc = None
        self._ampProc = None
        self._alertProc = None
        self._eventProc = None

        self._newWhenFirstSeen = False
        self._oldEvents = []
        self._agencyIDs = []
        self._phaseHints = []
        self._phaseStreams = []
        self._phaseNumber = 1
        self._phaseInterval = 1

    def createCommandLineDescription(self):
        """Register scalert's command-line options."""
        self.commandline().addOption("Generic", "first-new",
            "calls an event a new event when it is seen the first time")
        self.commandline().addGroup("Alert")
        self.commandline().addStringOption("Alert", "amp-type",
            "amplitude type to listen to", self._ampType)
        self.commandline().addStringOption("Alert", "pick-script",
            "script to be called when a pick arrived, network-, station code pick publicID are passed as parameters $1, $2, $3 and $4")
        self.commandline().addStringOption("Alert", "amp-script",
            "script to be called when a station amplitude arrived, network-, station code, amplitude and amplitude publicID are passed as parameters $1, $2, $3 and $4")
        self.commandline().addStringOption("Alert", "alert-script",
            "script to be called when a preliminary origin arrived, latitude and longitude are passed as parameters $1 and $2")
        self.commandline().addStringOption("Alert", "event-script",
            "script to be called when an event has been declared; the message string, a flag (1=new event, 0=update event), the EventID, the arrival count and the magnitude (optional when set) are passed as parameter $1, $2, $3, $4 and $5")
        self.commandline().addGroup("Cities")
        self.commandline().addStringOption("Cities", "max-dist",
            "maximum distance for using the distance from a city to the earthquake")
        self.commandline().addStringOption("Cities", "min-population",
            "minimum population for a city to become a point of interest")
        self.commandline().addGroup("Debug")
        self.commandline().addStringOption("Debug", "eventid,E", "specify Event ID")
        return True

    def init(self):
        """Read configuration and command-line overrides, resolve and
        validate the configured scripts, and subscribe to the message
        groups needed by the configured scripts.

        Returns False (aborting startup) if no script at all is
        configured, or if a configured script is missing/not executable.
        """
        if not seiscomp.client.Application.init(self):
            return False

        foundScript = False
        # module configuration paramters
        try:
            self._newWhenFirstSeen = self.configGetBool("firstNew")
        except:
            pass

        try:
            self._agencyIDs = [self.configGetString("agencyID")]
        except:
            pass

        # agencyIDs (plural) overrides the single agencyID when present.
        try:
            agencyIDs = self.configGetStrings("agencyIDs")
            self._agencyIDs = []
            for item in agencyIDs:
                item = item.strip()
                if item not in self._agencyIDs:
                    self._agencyIDs.append(item)
        except:
            pass

        self._phaseHints = ['P', 'S']
        try:
            phaseHints = self.configGetStrings("constraints.phaseHints")
            self._phaseHints = []
            for item in phaseHints:
                item = item.strip()
                if item not in self._phaseHints:
                    self._phaseHints.append(item)
        except:
            pass

        self._phaseStreams = []
        try:
            phaseStreams = self.configGetStrings("constraints.phaseStreams")
            for item in phaseStreams:
                rule = item.strip()
                # rule is NET.STA.LOC.CHA and the special charactes ? * | ( ) are allowed
                if not re.fullmatch(r'[A-Z|a-z|0-9|\?|\*|\||\(|\)|\.]+', rule):
                    seiscomp.logging.error(
                        "Wrong stream ID format in `constraints.phaseStreams`: %s" % item)
                    return False
                # convert rule to a valid regular expression
                rule = re.sub(r'\.', r'\.', rule)
                rule = re.sub(r'\?', '.', rule)
                rule = re.sub(r'\*', '.*', rule)
                if rule not in self._phaseStreams:
                    self._phaseStreams.append(rule)
        except:
            pass

        try:
            self._phaseNumber = self.configGetInt("constraints.phaseNumber")
        except:
            pass

        try:
            self._phaseInterval = self.configGetInt("constraints.phaseInterval")
        except:
            pass

        # When more than one phase is required, buffer picks over the
        # configured interval and check them periodically (1 s timer).
        if self._phaseNumber > 1:
            self._pickCache = seiscomp.datamodel.PublicObjectTimeSpanBuffer()
            self._pickCache.setTimeSpan(
                seiscomp.core.TimeSpan(self._phaseInterval))
            self.enableTimer(1)

        try:
            self._eventDescriptionPattern = self.configGetString("poi.message")
        except:
            pass

        try:
            self._citiesMaxDist = self.configGetDouble("poi.maxDist")
        except:
            pass

        try:
            self._citiesMinPopulation = self.configGetInt("poi.minPopulation")
        except:
            pass

        # mostly command-line options
        try:
            self._citiesMaxDist = self.commandline().optionDouble("max-dist")
        except:
            pass

        try:
            if self.commandline().hasOption("first-new"):
                self._newWhenFirstSeen = True
        except:
            pass

        try:
            self._citiesMinPopulation = self.commandline().optionInt("min-population")
        except:
            pass

        try:
            self._ampType = self.commandline().optionString("amp-type")
        except:
            pass

        # For each script: command line takes precedence over configuration.
        try:
            self._pickScript = self.commandline().optionString("pick-script")
        except:
            try:
                self._pickScript = self.configGetString("scripts.pick")
            except:
                seiscomp.logging.warning("No pick script defined")

        if self._pickScript:
            self._pickScript = seiscomp.system.Environment.Instance().absolutePath(self._pickScript)
            seiscomp.logging.info("Using pick script %s" % self._pickScript)

            if not os.path.isfile(self._pickScript):
                seiscomp.logging.error(" + not exising")
                return False

            if not os.access(self._pickScript, os.X_OK):
                seiscomp.logging.error(" + not executable")
                return False

            foundScript = True

        try:
            self._ampScript = self.commandline().optionString("amp-script")
        except:
            try:
                self._ampScript = self.configGetString("scripts.amplitude")
            except:
                seiscomp.logging.warning("No amplitude script defined")

        if self._ampScript:
            self._ampScript = seiscomp.system.Environment.Instance().absolutePath(self._ampScript)
            seiscomp.logging.info("Using amplitude script %s" % self._ampScript)

            if not os.path.isfile(self._ampScript):
                seiscomp.logging.error(" + not exising")
                return False

            if not os.access(self._ampScript, os.X_OK):
                seiscomp.logging.error(" + not executable")
                return False

            foundScript = True

        try:
            self._alertScript = self.commandline().optionString("alert-script")
        except:
            try:
                self._alertScript = self.configGetString("scripts.alert")
            except:
                seiscomp.logging.warning("No alert script defined")

        if self._alertScript:
            self._alertScript = seiscomp.system.Environment.Instance(
            ).absolutePath(self._alertScript)
            seiscomp.logging.info("Using alert script %s" % self._alertScript)

            if not os.path.isfile(self._alertScript):
                seiscomp.logging.error(" + not exising")
                return False

            if not os.access(self._alertScript, os.X_OK):
                seiscomp.logging.error(" + not executable")
                return False

            foundScript = True

        try:
            self._eventScript = self.commandline().optionString("event-script")
        except:
            try:
                self._eventScript = self.configGetString("scripts.event")
            except:
                seiscomp.logging.warning("No event script defined")

        if self._eventScript:
            self._eventScript = seiscomp.system.Environment.Instance(
            ).absolutePath(self._eventScript)
            seiscomp.logging.info("Using event script %s" % self._eventScript)

            if not os.path.isfile(self._eventScript):
                seiscomp.logging.error(" + not exising")
                return False

            if not os.access(self._eventScript, os.X_OK):
                seiscomp.logging.error(" + not executable")
                return False

            foundScript = True

        if not foundScript:
            seiscomp.logging.error("Found no valid script in configuration")
            return False

        seiscomp.logging.info("Creating ringbuffer for 100 objects")
        if not self.query():
            seiscomp.logging.warning(
                "No valid database interface to read from")
        self._cache = seiscomp.datamodel.PublicObjectRingBuffer(
            self.query(), 100)

        # Only subscribe to the high-volume groups actually needed.
        if self._ampScript and self.connection():
            seiscomp.logging.info(
                "Amplitude script defined: subscribing to AMPLITUDE message group")
            self.connection().subscribe("AMPLITUDE")

        if self._pickScript and self.connection():
            seiscomp.logging.info(
                "Pick script defined: subscribing to PICK message group")
            self.connection().subscribe("PICK")

        if self._newWhenFirstSeen:
            seiscomp.logging.info(
                "A new event is declared when I see it the first time")

        seiscomp.logging.info("Filtering:")
        if " ".join(self._agencyIDs):
            seiscomp.logging.info(" + agencyIDs filter for events and picks: %s" % (" ".join(self._agencyIDs)))
        else:
            seiscomp.logging.info(" + agencyIDs: no filter is applied")

        if " ".join(self._phaseHints):
            seiscomp.logging.info(" + phase hint filter for picks: '%s'" % (" ".join(self._phaseHints)))
        else:
            seiscomp.logging.info(" + phase hints: no filter is applied")

        if " ".join(self._phaseStreams):
            seiscomp.logging.info(" + phase stream ID filter for picks: '%s'" % (" ".join(self._phaseStreams)))
        else:
            seiscomp.logging.info(" + phase stream ID: no filter is applied")

        return True

    def run(self):
        """Optionally process a single event given via --eventid, then
        enter the normal application loop."""
        try:
            try:
                eventID = self.commandline().optionString("eventid")
                event = self._cache.get(seiscomp.datamodel.Event, eventID)
                if event:
                    self.notifyEvent(event)
            except:
                pass

            return seiscomp.client.Application.run(self)
        except:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            return False

    def runPickScript(self, pickObjectList):
        """Launch the pick script once per pick; a pick is skipped if the
        previous script instance is still running after a 1 s grace."""
        if not self._pickScript:
            return

        for pickObject in pickObjectList:
            # parse values; fall back to "unknown" for unset attributes
            try:
                net = pickObject.waveformID().networkCode()
            except:
                net = "unknown"
            try:
                sta = pickObject.waveformID().stationCode()
            except:
                sta = "unknown"
            pickID = pickObject.publicID()
            try:
                phaseHint = pickObject.phaseHint().code()
            except:
                phaseHint = "unknown"

            print(net, sta, pickID, phaseHint)

            if self._pickProc is not None:
                if self._pickProc.poll() is None:
                    seiscomp.logging.info(
                        "Pick script still in progress -> wait one second")
                    # NOTE(review): Popen.wait(1) raises
                    # subprocess.TimeoutExpired if the process is still
                    # running -- confirm this is caught upstream as intended.
                    self._pickProc.wait(1)
                    if self._pickProc.poll() is None:
                        seiscomp.logging.warning(
                            "Pick script still in progress -> skipping message")
                        return
            try:
                self._pickProc = subprocess.Popen(
                    [self._pickScript, net, sta, pickID, phaseHint])
                seiscomp.logging.info(
                    "Started pick script with pid %d" % self._pickProc.pid)
            except:
                seiscomp.logging.error(
                    "Failed to start pick script '%s'" % self._pickScript)

    def runAmpScript(self, ampObject):
        """Launch the amplitude script unless the previous one still runs."""
        if not self._ampScript:
            return

        # parse values
        net = ampObject.waveformID().networkCode()
        sta = ampObject.waveformID().stationCode()
        amp = ampObject.amplitude().value()
        ampID = ampObject.publicID()

        if self._ampProc is not None:
            if self._ampProc.poll() is None:
                seiscomp.logging.warning(
                    "Amplitude script still in progress -> skipping message")
                return
        try:
            self._ampProc = subprocess.Popen(
                [self._ampScript, net, sta, "%.2f" % amp, ampID])
            seiscomp.logging.info(
                "Started amplitude script with pid %d" % self._ampProc.pid)
        except:
            seiscomp.logging.error(
                "Failed to start amplitude script '%s'" % self._ampScript)

    def runAlert(self, lat, lon):
        """Launch the alert script for a preliminary origin location."""
        if not self._alertScript:
            return

        if self._alertProc is not None:
            if self._alertProc.poll() is None:
                seiscomp.logging.warning(
                    "AlertScript still in progress -> skipping message")
                return
        try:
            self._alertProc = subprocess.Popen(
                [self._alertScript, "%.1f" % lat, "%.1f" % lon])
            seiscomp.logging.info(
                "Started alert script with pid %d" % self._alertProc.pid)
        except:
            seiscomp.logging.error(
                "Failed to start alert script '%s'" % self._alertScript)

    def handleMessage(self, msg):
        """Intercept data messages to fire the alert script for
        preliminary origins, then delegate to the base implementation."""
        try:
            dm = seiscomp.core.DataMessage.Cast(msg)
            if dm:
                for att in dm:
                    org = seiscomp.datamodel.Origin.Cast(att)
                    if org:
                        try:
                            if org.evaluationStatus() == seiscomp.datamodel.PRELIMINARY:
                                self.runAlert(org.latitude().value(),
                                              org.longitude().value())
                        except:
                            pass

            #ao = seiscomp.datamodel.ArtificialOriginMessage.Cast(msg)
            # if ao:
            #    org = ao.origin()
            #    if org:
            #        self.runAlert(org.latitude().value(), org.longitude().value())
            #        return

            seiscomp.client.Application.handleMessage(self, msg)
        except:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)

    def addObject(self, parentID, object):
        """Dispatch a newly received object (pick, amplitude, origin,
        magnitude or event) to the matching notification handler."""
        try:
            # pick
            obj = seiscomp.datamodel.Pick.Cast(object)
            if obj:
                self._cache.feed(obj)
                seiscomp.logging.debug("got new pick '%s'" % obj.publicID())
                agencyID = obj.creationInfo().agencyID()
                phaseHint = obj.phaseHint().code()
                if self._phaseStreams:
                    waveformID = "%s.%s.%s.%s" % (
                        obj.waveformID().networkCode(), obj.waveformID().stationCode(),
                        obj.waveformID().locationCode(), obj.waveformID().channelCode())
                    matched = False
                    for rule in self._phaseStreams:
                        if re.fullmatch(rule, waveformID):
                            matched = True
                            break
                    if not matched:
                        seiscomp.logging.debug(
                            " + stream ID %s does not match constraints.phaseStreams rules"
                            % (waveformID))
                        return

                if not self._agencyIDs or agencyID in self._agencyIDs:
                    if not self._phaseHints or phaseHint in self._phaseHints:
                        self.notifyPick(obj)
                    else:
                        seiscomp.logging.debug(" + phase hint %s does not match '%s'"
                                               % (phaseHint, self._phaseHints))
                else:
                    seiscomp.logging.debug(" + agencyID %s does not match '%s'"
                                           % (agencyID, self._agencyIDs))
                return

            # amplitude
            obj = seiscomp.datamodel.Amplitude.Cast(object)
            if obj:
                if obj.type() == self._ampType:
                    seiscomp.logging.debug("got new %s amplitude '%s'" % (
                        self._ampType, obj.publicID()))
                    self.notifyAmplitude(obj)
                return

            # origin
            obj = seiscomp.datamodel.Origin.Cast(object)
            if obj:
                self._cache.feed(obj)
                seiscomp.logging.debug("got new origin '%s'" % obj.publicID())

                try:
                    if obj.evaluationStatus() == seiscomp.datamodel.PRELIMINARY:
                        self.runAlert(obj.latitude().value(),
                                      obj.longitude().value())
                except:
                    pass

                return

            # magnitude
            obj = seiscomp.datamodel.Magnitude.Cast(object)
            if obj:
                self._cache.feed(obj)
                seiscomp.logging.debug(
                    "got new magnitude '%s'" % obj.publicID())
                return

            # event
            obj = seiscomp.datamodel.Event.Cast(object)
            if obj:
                # NOTE(review): org may be None if the preferred origin is
                # not in cache/db; creationInfo() would then raise and be
                # swallowed by the outer except -- confirm intended.
                org = self._cache.get(
                    seiscomp.datamodel.Origin, obj.preferredOriginID())
                agencyID = org.creationInfo().agencyID()
                seiscomp.logging.debug("got new event '%s'" % obj.publicID())
                if not self._agencyIDs or agencyID in self._agencyIDs:
                    self.notifyEvent(obj, True)
                return
        except:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)

    def updateObject(self, parentID, object):
        """Handle event updates: re-notify with newEvent=False."""
        try:
            obj = seiscomp.datamodel.Event.Cast(object)
            if obj:
                # NOTE(review): same possible None dereference as in
                # addObject() when the origin is unavailable.
                org = self._cache.get(
                    seiscomp.datamodel.Origin, obj.preferredOriginID())
                agencyID = org.creationInfo().agencyID()
                seiscomp.logging.debug("update event '%s'" % obj.publicID())
                if not self._agencyIDs or agencyID in self._agencyIDs:
                    self.notifyEvent(obj, False)
        except:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)

    def handleTimeout(self):
        # Periodic (1 s) check of the buffered picks.
        self.checkEnoughPicks()

    def checkEnoughPicks(self):
        """Run the pick script once enough picks have been buffered and
        the configured interval has elapsed."""
        if self._pickCache.size() >= self._phaseNumber:
            # wait until self._phaseInterval has elapsed before calling the
            # script (more picks might come)
            timeWindowLength = (seiscomp.core.Time.GMT() - self._pickCache.oldest()).length()
            if timeWindowLength >= self._phaseInterval:
                picks = [seiscomp.datamodel.Pick.Cast(o) for o in self._pickCache]
                self.runPickScript(picks)
                self._pickCache.clear()

    def notifyPick(self, pick):
        """Either run the pick script immediately (single-phase mode) or
        buffer the pick for batched processing."""
        if self._phaseNumber <= 1:
            self.runPickScript([pick])
        else:
            self.checkEnoughPicks()
            self._pickCache.feed(pick)

    def notifyAmplitude(self, amp):
        # Thin delegation; kept for symmetry with notifyPick/notifyEvent.
        self.runAmpScript(amp)

    def notifyEvent(self, evt, newEvent=True, dtmax=3600):
        """Build a human-readable event message and run the event script.

        Non-preliminary events without a resolvable preferred magnitude
        are ignored. dtmax is currently unused (see commented-out check).
        """
        try:
            org = self._cache.get(
                seiscomp.datamodel.Origin, evt.preferredOriginID())
            if not org:
                seiscomp.logging.warning(
                    "unable to get origin %s, ignoring event message" % evt.preferredOriginID())
                return

            preliminary = False
            try:
                if org.evaluationStatus() == seiscomp.datamodel.PRELIMINARY:
                    preliminary = True
            except:
                pass

            if preliminary == False:
                nmag = self._cache.get(
                    seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID())
                if nmag:
                    mag = nmag.magnitude().value()
                    mag = "magnitude %.1f" % mag
                else:
                    if len(evt.preferredMagnitudeID()) > 0:
                        seiscomp.logging.warning(
                            "unable to get magnitude %s, ignoring event message" % evt.preferredMagnitudeID())
                    else:
                        seiscomp.logging.warning(
                            "no preferred magnitude yet, ignoring event message")
                    return

            # keep track of old events
            if self._newWhenFirstSeen:
                if evt.publicID() in self._oldEvents:
                    newEvent = False
                else:
                    newEvent = True
                self._oldEvents.append(evt.publicID())

            dsc = seiscomp.seismology.Regions.getRegionName(
                org.latitude().value(), org.longitude().value())

            # Optional point-of-interest description from the nearest city.
            if self._eventDescriptionPattern:
                try:
                    city, dist, azi = self.nearestCity(org.latitude().value(), org.longitude(
                    ).value(), self._citiesMaxDist, self._citiesMinPopulation)
                    if city:
                        dsc = self._eventDescriptionPattern
                        region = seiscomp.seismology.Regions.getRegionName(
                            org.latitude().value(), org.longitude().value())
                        distStr = str(int(seiscomp.math.deg2km(dist)))
                        dsc = dsc.replace("@region@", region).replace(
                            "@dist@", distStr).replace("@poi@", city.name())
                except:
                    pass

            seiscomp.logging.debug("desc: %s" % dsc)

            dep = org.depth().value()
            now = seiscomp.core.Time.GMT()
            otm = org.time().value()

            dt = (now - otm).seconds()

            # if dt > dtmax:
            #    return

            # Reuse dt as the human-readable age string.
            if dt > 3600:
                dt = "%d hours %d minutes ago" % (dt/3600, (dt % 3600)/60)
            elif dt > 120:
                dt = "%d minutes ago" % (dt/60)
            else:
                dt = "%d seconds ago" % dt

            if preliminary:
                message = "earthquake, XXL, preliminary, %s, %s" % (dt, dsc)
            else:
                message = "earthquake, %s, %s, %s, depth %d kilometers" % (
                    dt, dsc, mag, int(dep+0.5))
            seiscomp.logging.info(message)

            if not self._eventScript:
                return

            if self._eventProc is not None:
                if self._eventProc.poll() is None:
                    seiscomp.logging.warning(
                        "EventScript still in progress -> skipping message")
                    return

            try:
                # $2 = new-event flag, $4 = phase count, $5 = magnitude
                param2 = 0
                param3 = 0
                param4 = ""
                if newEvent:
                    param2 = 1

                org = self._cache.get(
                    seiscomp.datamodel.Origin, evt.preferredOriginID())
                if org:
                    try:
                        param3 = org.quality().associatedPhaseCount()
                    except:
                        pass

                nmag = self._cache.get(
                    seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID())
                if nmag:
                    param4 = "%.1f" % nmag.magnitude().value()

                self._eventProc = subprocess.Popen(
                    [self._eventScript, message, "%d" % param2, evt.publicID(), "%d" % param3, param4])
                seiscomp.logging.info(
                    "Started event script with pid %d" % self._eventProc.pid)
            except:
                seiscomp.logging.error("Failed to start event script '%s %s %d %d %s'" % (
                    self._eventScript, message, param2, param3, param4))
        except:
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)

    def printUsage(self):
        """Print usage, generic application options and examples."""
        print('''Usage:
  scalert [options]

Execute custom scripts upon arrival of objects or updates''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Execute scalert on command line with debug output
  scalert --debug
''')
|
||||
|
||||
# Instantiate the application and exit with its return code.
app = ObjectAlert(len(sys.argv), sys.argv)
sys.exit(app())
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import seiscomp.scbulletin
|
||||
|
||||
if __name__ == "__main__":
    # Thin wrapper script: delegate directly to the scbulletin module.
    seiscomp.scbulletin.main()
|
Binary file not shown.
Binary file not shown.
File diff suppressed because it is too large
Load Diff
Binary file not shown.
@ -0,0 +1,238 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
from __future__ import division, print_function
|
||||
|
||||
import sys
|
||||
import os
|
||||
import seiscomp.client
|
||||
import seiscomp.datamodel
|
||||
import seiscomp.config
|
||||
|
||||
|
||||
def readParams(sc_params):
    """Flatten a ParameterSet into a plain dict.

    Parameters inherited from the base parameter set (if any) are read
    first, then overridden by the parameters defined directly on
    *sc_params*.
    """
    params = {}

    baseID = sc_params.baseID()
    if baseID:
        base = seiscomp.datamodel.ParameterSet.Find(baseID)
        if base is not None:
            # Recurse into the inheritance chain first so local values win.
            params = readParams(base)
        else:
            sys.stderr.write("Warning: %s: base parameter set for %s not found\n" % (
                baseID, sc_params.publicID()))

    for idx in range(sc_params.parameterCount()):
        param = sc_params.parameter(idx)
        params[param.name()] = param.value()

    return params
||||
|
||||
|
||||
class DumpCfg(seiscomp.client.Application):
    """Dump the module configuration or the bindings of a given SeisComP module.

    The first command line argument is the module name whose configuration
    (or bindings, with --bindings) is dumped.
    """

    def __init__(self, argc, argv):
        if argc < 2:
            sys.stderr.write("scdumpcfg {modname} [options]\n")
            raise RuntimeError

        # Module whose configuration should be dumped.
        self.appName = argv[1]

        # Remove first parameter to replace appname with passed module name
        argc = argc - 1
        argv = argv[1:]

        seiscomp.client.Application.__init__(self, argc, argv)

        self.setMessagingEnabled(True)
        self.setMessagingUsername("")
        self.setDatabaseEnabled(True, True)
        self.setLoadConfigModuleEnabled(True)
        self.setDaemonEnabled(False)

    def createCommandLineDescription(self):
        self.commandline().addGroup("Dump")
        self.commandline().addStringOption("Dump", "param,P",
                                           "Specify parameter name to filter for.")
        self.commandline().addOption("Dump", "bindings,B",
                                     "Dump bindings instead of module configuration.")
        # NOTE: fixed typo "avaible" -> "available" in the help text.
        self.commandline().addOption("Dump", "allow-global,G",
                                     "Print global bindings if no module binding is available.")
        self.commandline().addOption("Dump", "cfg",
                                     "Print output in .cfg format.")
        self.commandline().addOption("Dump", "nslc",
                                     "Print the list of streams which have bindings of the given module.")

    def validateParameters(self):
        """Evaluate command line options; disable messaging and database
        access when only the module configuration is dumped."""
        if not seiscomp.client.Application.validateParameters(self):
            return False

        self.dumpBindings = self.commandline().hasOption("bindings")

        try:
            self.param = self.commandline().optionString("param")
        except Exception:
            # Option not given on the command line.
            self.param = None

        self.allowGlobal = self.commandline().hasOption("allow-global")
        self.formatCfg = self.commandline().hasOption("cfg")
        self.nslc = self.commandline().hasOption("nslc")

        # Module configuration is read from files only; no connection needed.
        if not self.dumpBindings:
            self.setMessagingEnabled(False)
            self.setDatabaseEnabled(False, False)
            self.setLoadConfigModuleEnabled(False)

        return True

    def initConfiguration(self):
        # The module name slot may carry a help request instead.
        if self.appName == "-h" or self.appName == "--help":
            self.printUsage()
            return False

        return seiscomp.client.Application.initConfiguration(self)

    # Do nothing.
    def initSubscriptions(self):
        return True

    def printUsage(self):

        print('''Usage:
  {} [options]

Dump bindings or module configurations used by a specific module or global for
particular stations.'''.format(os.path.basename(__file__)), file=sys.stderr)

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Dump global bindings configuration for all stations
  {} global -d localhost -B > config.xml
'''.format(os.path.basename(__file__)), file=sys.stderr)

    def run(self):
        """Dump either the module configuration symbols or the per-station
        bindings, depending on the command line options."""
        cfg = self.configuration()
        if self.nslc:
            nslc = set()

        if not self.dumpBindings:
            # --- module configuration ---
            symtab = cfg.symbolTable()
            names = cfg.names()
            count = 0
            for name in names:
                if self.param and self.param != name:
                    continue
                sym = symtab.get(name)
                if self.formatCfg:
                    if sym.comment:
                        if count > 0:
                            sys.stdout.write("\n")
                        sys.stdout.write("%s\n" % sym.comment)
                    sys.stdout.write("%s = %s\n" % (sym.name, sym.content))
                else:
                    sys.stdout.write("%s\n" % sym.name)
                    sys.stdout.write("  value(s) : %s\n" %
                                     ", ".join(sym.values))
                    sys.stdout.write("  source   : %s\n" % sym.uri)
                count = count + 1

            if self.param and count == 0:
                # NOTE: fixed misplaced period ("\n." -> ".\n").
                sys.stderr.write("%s: definition not found.\n" % self.param)
        else:
            # --- bindings ---
            cfg = self.configModule()
            if cfg is None:
                sys.stderr.write("No config module read\n")
                return False

            tmp = {}
            for i in range(cfg.configStationCount()):
                cfg_sta = cfg.configStation(i)
                tmp[(cfg_sta.networkCode(), cfg_sta.stationCode())] = cfg_sta

            name = self.name()
            # For backward compatibility rename global to default
            if name == "global":
                name = "default"

            for item in sorted(tmp.keys()):
                cfg_sta = tmp[item]
                sta_enabled = cfg_sta.enabled()
                cfg_setup = seiscomp.datamodel.findSetup(
                    cfg_sta, name, self.allowGlobal)

                if cfg_setup is not None:
                    suffix = ""
                    if sta_enabled and cfg_setup.enabled():
                        out = "+ "
                    else:
                        # Collect the disable reasons; joining them fixes the
                        # former "(, setup disabled)" output when only the
                        # setup was disabled.
                        reasons = []
                        if not sta_enabled:
                            reasons.append("station disabled")
                        if not cfg_setup.enabled():
                            reasons.append("setup disabled")
                        suffix = " (%s)" % ", ".join(reasons)
                        out = "- "
                    out += "%s.%s%s\n" % (cfg_sta.networkCode(),
                                          cfg_sta.stationCode(), suffix)
                    params = seiscomp.datamodel.ParameterSet.Find(
                        cfg_setup.parameterSetID())
                    if params is None:
                        sys.stderr.write(
                            "ERROR: %s: ParameterSet not found\n" %
                            cfg_setup.parameterSetID())
                        return False

                    params = readParams(params)
                    if self.nslc:
                        # params is a plain dict here -> only KeyError possible.
                        try:
                            sensorLocation = params["detecLocid"]
                        except KeyError:
                            sensorLocation = ""
                        try:
                            detecStream = params["detecStream"]
                        except KeyError:
                            detecStream = ""

                        stream = "%s.%s.%s.%s" % \
                            (cfg_sta.networkCode(), cfg_sta.stationCode(),
                             sensorLocation, detecStream)
                        nslc.add(stream)
                    count = 0
                    for param_name in sorted(params.keys()):
                        if self.param and self.param != param_name:
                            continue
                        out += "  %s: %s\n" % (param_name, params[param_name])
                        count = count + 1

                    if not self.nslc and count > 0:
                        sys.stdout.write(out)

        if self.nslc:
            for stream in sorted(nslc):
                print(stream, file=sys.stdout)

        return True
|
||||
|
||||
|
||||
# Construction may raise (e.g. no module name argument was given and
# DumpCfg.__init__ already printed the usage line); exit with an error code.
# Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
try:
    app = DumpCfg(len(sys.argv), sys.argv)
except Exception:
    sys.exit(1)

sys.exit(app())
|
@ -0,0 +1,75 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import sys
|
||||
import seiscomp.client, seiscomp.datamodel, seiscomp.io
|
||||
|
||||
|
||||
class ObjectDumper(seiscomp.client.Application):
    """Load a single object by publicID from the database and print it as XML."""

    def __init__(self):
        seiscomp.client.Application.__init__(self, len(sys.argv), sys.argv)
        self.setMessagingEnabled(True)
        self.setDatabaseEnabled(True, False)
        self.setMessagingUsername("")

    def createCommandLineDescription(self):
        seiscomp.client.Application.createCommandLineDescription(self)
        commandline = self.commandline()
        commandline.addGroup("Dump")
        commandline.addStringOption("Dump", "public-id,P", "publicID")

    def loadEventParametersObject(self, publicID):
        """Try each event-parameter type in turn; wrap the first match in a
        fresh EventParameters container (None if nothing matches)."""
        candidates = (
            seiscomp.datamodel.Pick,
            seiscomp.datamodel.Amplitude,
            seiscomp.datamodel.Origin,
            seiscomp.datamodel.Event,
            seiscomp.datamodel.FocalMechanism,
            seiscomp.datamodel.Magnitude,
            seiscomp.datamodel.StationMagnitude,
        )
        for objtype in candidates:
            loaded = objtype.Cast(
                self.query().loadObject(objtype.TypeInfo(), publicID))
            if loaded:
                ep = seiscomp.datamodel.EventParameters()
                ep.add(loaded)
                return ep

    def loadInventoryObject(self, publicID):
        """Try each inventory type in turn; return the first match as-is
        (None if nothing matches)."""
        candidates = (
            seiscomp.datamodel.Network,
            seiscomp.datamodel.Station,
            seiscomp.datamodel.Sensor,
            seiscomp.datamodel.SensorLocation,
            seiscomp.datamodel.Stream,
        )
        for objtype in candidates:
            loaded = objtype.Cast(
                self.query().loadObject(objtype.TypeInfo(), publicID))
            if loaded:
                return loaded

    def run(self):
        publicID = self.commandline().optionString("public-id")
        obj = self.loadEventParametersObject(publicID)
        if obj is None:
            obj = self.loadInventoryObject(publicID)
        if obj is None:
            raise ValueError("unknown object '" + publicID + "'")

        # dump formatted XML archive to stdout
        ar = seiscomp.io.XMLArchive()
        ar.setFormattedOutput(True)
        ar.create("-")
        ar.writeObject(obj)
        ar.close()
        return True
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run the dumper; the Application call operator drives init/run/done.
    app = ObjectDumper()
    app()
|
@ -0,0 +1,76 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import sys
|
||||
import os
|
||||
import seiscomp.client
|
||||
import seiscomp.datamodel
|
||||
import seiscomp.io
|
||||
|
||||
|
||||
class EventParameterLog(seiscomp.client.Application):
    """Collect event-parameter notifier messages and print them as SCML
    when the application terminates."""

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        self.setMessagingEnabled(True)
        self.setDatabaseEnabled(False, False)
        self.setMessagingUsername("")
        self.setPrimaryMessagingGroup(
            seiscomp.client.Protocol.LISTENER_GROUP)
        # Subscribe to every group that contributes to event parameters.
        for group in ("EVENT", "LOCATION", "MAGNITUDE", "AMPLITUDE", "PICK"):
            self.addMessagingSubscription(group)

        self.setAutoApplyNotifierEnabled(True)
        self.setInterpretNotifierEnabled(True)

        # EventParameter object
        self._eventParameters = seiscomp.datamodel.EventParameters()

    def printUsage(self):

        print('''Usage:
  sceplog [options]

Receive event parameters from messaging and write them to stdout in SCML''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Execute sceplog with debug output
  sceplog --debug
''')

    def run(self):
        if not seiscomp.client.Application.run(self):
            return False

        archive = seiscomp.io.XMLArchive()
        archive.setFormattedOutput(True)
        if archive.create("-"):
            archive.writeObject(self._eventParameters)
            archive.close()
            # Hack to avoid the "close failed in file object destructor"
            # exception
            # print ""
            sys.stdout.write("\n")

        return True
|
||||
|
||||
|
||||
# Run the application; its exit code becomes the process exit status.
app = EventParameterLog(len(sys.argv), sys.argv)
sys.exit(app())
|
Binary file not shown.
@ -0,0 +1,850 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import sys
import os
import re
import traceback

import seiscomp.client
import seiscomp.core
import seiscomp.datamodel
import seiscomp.io
import seiscomp.logging
import seiscomp.scbulletin
import seiscomp.system
||||
|
||||
|
||||
def time2str(time):
    """Render a seiscomp.core.Time as 'YYYY-MM-DD HH:MM:SS.mmm'."""
    stamp = time.toString("%Y-%m-%d %H:%M:%S.%f000000")
    # Keep the first 23 characters, i.e. truncate to millisecond precision.
    return stamp[:23]
||||
|
||||
|
||||
def createDirectory(dir):
    """Ensure that directory *dir* exists.

    Returns True if the directory is already accessible or could be
    created (including intermediate directories), False otherwise.
    """
    if os.access(dir, os.W_OK):
        return True

    try:
        os.makedirs(dir)
        return True
    except OSError:
        # Creation failed (permissions, invalid path, or a race with another
        # process). Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # are not swallowed.
        return False
|
||||
|
||||
|
||||
def originStatusToChar(org):
    """Map an origin's evaluation mode/status to a single summary character.

    'M' manual, 'P' preliminary, 'C' confirmed/reviewed/final,
    'X' rejected, 'R' reported, 'A' otherwise (automatic/unknown).
    """
    # Manual origins are always tagged as M, regardless of their status.
    # The bare excepts are kept deliberately: unset optional attributes
    # raise when accessed.
    try:
        if org.evaluationMode() == seiscomp.datamodel.MANUAL:
            return 'M'
    except:
        pass

    try:
        status = org.evaluationStatus()
        if status == seiscomp.datamodel.PRELIMINARY:
            return 'P'
        if status in (seiscomp.datamodel.CONFIRMED,
                      seiscomp.datamodel.REVIEWED,
                      seiscomp.datamodel.FINAL):
            return 'C'
        if status == seiscomp.datamodel.REJECTED:
            return 'X'
        if status == seiscomp.datamodel.REPORTED:
            return 'R'
    except:
        pass

    return 'A'
|
||||
|
||||
|
||||
class CachePopCallback(seiscomp.datamodel.CachePopCallback):
    """Adapter forwarding cache eviction notifications to *target*.

    The target must implement objectAboutToPop(obj).
    """

    def __init__(self, target):
        seiscomp.datamodel.CachePopCallback.__init__(self)
        # Object notified whenever the cache is about to drop an entry.
        self.target = target

    def handle(self, obj):
        # Called by the cache right before *obj* is removed.
        self.target.objectAboutToPop(obj)
|
||||
|
||||
|
||||
class EventHistory(seiscomp.client.Application):
|
||||
    def __init__(self, argc, argv):
        """Set up messaging, the half-hour object cache and output defaults."""
        seiscomp.client.Application.__init__(self, argc, argv)
        # This application only records objects; it never emits notifiers.
        seiscomp.datamodel.Notifier.SetEnabled(False)

        self.setMessagingEnabled(True)
        self.setDatabaseEnabled(True, True)
        self.setMessagingUsername("scevtlog")
        self.setPrimaryMessagingGroup(
            seiscomp.client.Protocol.LISTENER_GROUP)
        self.addMessagingSubscription("EVENT")
        self.addMessagingSubscription("LOCATION")
        self.addMessagingSubscription("MAGNITUDE")

        self.setAutoApplyNotifierEnabled(True)
        self.setInterpretNotifierEnabled(True)

        # Create a callback object that gets called when an object
        # is going to be removed from the cache
        self._popCallback = CachePopCallback(self)

        # Create an object cache of half an hour
        self._cache = seiscomp.datamodel.PublicObjectTimeSpanBuffer(
            self.query(), seiscomp.core.TimeSpan(30.0*60.0))
        self._cache.setPopCallback(self._popCallback)

        # Event progress counter
        self._eventProgress = dict()

        # Event-Origin mapping
        self._eventToOrg = dict()
        self._orgToEvent = dict()

        # Event-Magnitude mapping
        self._eventToMag = dict()
        self._magToEvent = dict()

        # Output defaults: storage directory, bulletin format, and the
        # compression used for revision files.
        self._directory = "@LOGDIR@/events"
        self._format = "xml"
        self._currentDirectory = ""
        self._revisionFileExt = ".zip"
        self._useGZIP = False
|
||||
    def createCommandLineDescription(self):
        """Register the Storage command line options.

        A failure during option registration is logged but never fatal.
        """
        try:
            self.commandline().addGroup("Storage")
            self.commandline().addStringOption(
                "Storage", "directory,o", "Specify the storage directory. "
                "Default: @LOGDIR@/events.")
            self.commandline().addStringOption("Storage", "format,f",
                                               "Specify storage format (autoloc1, autoloc3, xml [default])")
        except:
            seiscomp.logging.warning(
                "caught unexpected error %s" % sys.exc_info())
        return True
|
||||
|
||||
    def initConfiguration(self):
        """Read directory/format/gzip settings from the module configuration.

        Each option is optional: configGet* raises when the key is unset,
        in which case the default from __init__ is kept.
        """
        if not seiscomp.client.Application.initConfiguration(self):
            return False

        try:
            self._directory = self.configGetString("directory")
        except:
            pass

        try:
            self._format = self.configGetString("format")
        except:
            pass

        try:
            if self.configGetBool("gzip"):
                # gzip-compressed revision files instead of zip.
                self._useGZIP = True
                self._revisionFileExt = ".gz"
        except:
            pass

        return True
|
||||
|
||||
    def printUsage(self):
        """Print usage, the base class options, and an example invocation."""
        print('''Usage:
  scevtlog [options]

Save event history into files''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Execute on command line with debug output
  scevtlog --debug
''')
|
||||
|
||||
def init(self):
|
||||
if not seiscomp.client.Application.init(self):
|
||||
return False
|
||||
|
||||
try:
|
||||
self._directory = self.commandline().optionString("directory")
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
self._format = self.commandline().optionString("format")
|
||||
except:
|
||||
pass
|
||||
|
||||
if self._format != "autoloc1" and self._format != "autoloc3" and self._format != "xml":
|
||||
self._format = "xml"
|
||||
|
||||
try:
|
||||
if self._directory[-1] != "/":
|
||||
self._directory = self._directory + "/"
|
||||
except:
|
||||
pass
|
||||
|
||||
if self._directory:
|
||||
self._directory = seiscomp.system.Environment.Instance().absolutePath(self._directory)
|
||||
sys.stderr.write("Logging events to %s\n" % self._directory)
|
||||
|
||||
self._cache.setDatabaseArchive(self.query())
|
||||
return True
|
||||
|
||||
# def run(self):
|
||||
# obj = self._cache.get(seiscomp.datamodel.Magnitude, "or080221153929#16#netMag.mb")
|
||||
|
||||
# self.updateObject(obj)
|
||||
# return True
|
||||
|
||||
    def done(self):
        """Shut the application down, then detach the cache from the database."""
        seiscomp.client.Application.done(self)
        self._cache.setDatabaseArchive(None)
|
||||
|
||||
def printEvent(self, evt, newEvent):
|
||||
if self._format != "xml":
|
||||
self.printEventProcAlert(evt, newEvent)
|
||||
else:
|
||||
self.printEventXML(evt, newEvent)
|
||||
self.advanceEventProgress(evt.publicID())
|
||||
|
||||
    def getSummary(self, time, org, mag):
        """Build the fixed-width summary row for an event revision.

        Columns: timestamp, latency (minutes.decimal), latitude, longitude,
        depth, used phase count, magnitude type, magnitude value, magnitude
        station count, origin status character.
        """
        strTime = time.toString("%Y-%m-%d %H:%M:%S")
        summary = [strTime, "", "", "", "", "", "", "", "", ""]

        if org:
            tim = org.time().value()
            # Latency between origin time and this snapshot.
            latency = time - tim

            # Minutes with two decimal digits, encoded as MMMMM.DD.
            summary[1] = "%5d.%02d" % (
                latency.seconds() / 60, (latency.seconds() % 60) * 100 / 60)

            lat = org.latitude().value()
            lon = org.longitude().value()

            # Depth and phase count are optional attributes; accessing an
            # unset attribute raises, leaving the column blank.
            dep = "%7s" % "---"
            try:
                dep = "%7.0f" % org.depth().value()
                summary[4] = dep
            except:
                summary[4] = "%7s" % ""

            phases = "%5s" % "---"
            try:
                phases = "%5d" % org.quality().usedPhaseCount()
                summary[5] = phases
            except:
                summary[5] = "%5s" % ""

            summary[2] = "%7.2f" % lat
            summary[3] = "%7.2f" % lon

            try:
                summary[9] = originStatusToChar(org)
            except:
                summary[9] = "-"

        if mag:
            summary[6] = "%12s" % mag.type()
            summary[7] = "%5.2f" % mag.magnitude().value()
            try:
                summary[8] = "%5d" % mag.stationCount()
            except:
                summary[8] = "     "
        else:
            summary[6] = "%12s" % ""
            summary[7] = "     "
            summary[8] = "     "

        return summary
|
||||
|
||||
def printEventProcAlert(self, evt, newEvent):
|
||||
now = seiscomp.core.Time.GMT()
|
||||
|
||||
org = self._cache.get(seiscomp.datamodel.Origin,
|
||||
evt.preferredOriginID())
|
||||
prefmag = self._cache.get(
|
||||
seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID())
|
||||
|
||||
summary = self.getSummary(now, org, prefmag)
|
||||
|
||||
# Load arrivals
|
||||
if org.arrivalCount() == 0:
|
||||
self.query().loadArrivals(org)
|
||||
|
||||
# Load station magnitudes
|
||||
if org.stationMagnitudeCount() == 0:
|
||||
self.query().loadStationMagnitudes(org)
|
||||
|
||||
# Load magnitudes
|
||||
if org.magnitudeCount() == 0:
|
||||
self.query().loadMagnitudes(org)
|
||||
|
||||
picks = []
|
||||
amps = []
|
||||
|
||||
if org:
|
||||
narr = org.arrivalCount()
|
||||
for i in range(narr):
|
||||
picks.append(self._cache.get(
|
||||
seiscomp.datamodel.Pick, org.arrival(i).pickID()))
|
||||
|
||||
nstamags = org.stationMagnitudeCount()
|
||||
for i in range(nstamags):
|
||||
amps.append(self._cache.get(
|
||||
seiscomp.datamodel.Amplitude, org.stationMagnitude(i).amplitudeID()))
|
||||
|
||||
netmag = {}
|
||||
nmag = org.magnitudeCount()
|
||||
|
||||
bulletin = seiscomp.scbulletin.Bulletin(None, self._format)
|
||||
try:
|
||||
txt = bulletin.printEvent(evt)
|
||||
except:
|
||||
txt = ""
|
||||
|
||||
if self._directory is None:
|
||||
sys.stdout.write("%s" % ("#<\n" + txt + "#>\n"))
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
# Use created time to look up the proper directory
|
||||
try:
|
||||
arNow = evt.creationInfo().creationTime().get()
|
||||
# Otherwise use now (in case that event.created has not been set
|
||||
# which is always valid within the SC3 distribution
|
||||
except:
|
||||
arNow = now.get()
|
||||
seiscomp.logging.error("directory is " + self._directory + "/".join(
|
||||
["%.2d" % i for i in arNow[1:4]]) + "/" + evt.publicID() + "/")
|
||||
|
||||
directory = self._directory + \
|
||||
"/".join(["%.2d" % i for i in arNow[1:4]]) + \
|
||||
"/" + evt.publicID() + "/"
|
||||
if directory != self._currentDirectory:
|
||||
if createDirectory(directory) == False:
|
||||
seiscomp.logging.error(
|
||||
"Unable to create directory %s" % directory)
|
||||
return
|
||||
|
||||
self._currentDirectory = directory
|
||||
self.writeLog(self._currentDirectory + self.convertID(evt.publicID()) +
|
||||
"." + ("%06d" % self.eventProgress(evt.publicID(), directory)), txt, "w")
|
||||
self.writeLog(self._currentDirectory +
|
||||
self.convertID(evt.publicID()) + ".last", txt, "w")
|
||||
self.writeLog(self._directory + "last", txt, "w")
|
||||
self.writeLog(self._currentDirectory + self.convertID(evt.publicID()) + ".summary",
|
||||
"|".join(summary), "a",
|
||||
"# Layout: Timestamp, +OT (minutes, decimal), Latitude, Longitude, Depth, PhaseCount, MagType, Magnitude, MagCount")
|
||||
|
||||
seiscomp.logging.info("cache size = %d" % self._cache.size())
|
||||
|
||||
def printEventXML(self, evt, newEvent):
|
||||
now = seiscomp.core.Time.GMT()
|
||||
|
||||
# Load comments
|
||||
if evt.commentCount() == 0:
|
||||
self.query().loadComments(evt)
|
||||
|
||||
# Load origin references
|
||||
if evt.originReferenceCount() == 0:
|
||||
self.query().loadOriginReferences(evt)
|
||||
|
||||
# Load event descriptions
|
||||
if evt.eventDescriptionCount() == 0:
|
||||
self.query().loadEventDescriptions(evt)
|
||||
|
||||
org = self._cache.get(seiscomp.datamodel.Origin,
|
||||
evt.preferredOriginID())
|
||||
|
||||
if evt.preferredFocalMechanismID():
|
||||
fm = self._cache.get(
|
||||
seiscomp.datamodel.FocalMechanism, evt.preferredFocalMechanismID())
|
||||
else:
|
||||
fm = None
|
||||
|
||||
# Load comments
|
||||
if org.commentCount() == 0:
|
||||
self.query().loadComments(org)
|
||||
|
||||
# Load arrivals
|
||||
if org.arrivalCount() == 0:
|
||||
self.query().loadArrivals(org)
|
||||
prefmag = self._cache.get(
|
||||
seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID())
|
||||
|
||||
wasEnabled = seiscomp.datamodel.PublicObject.IsRegistrationEnabled()
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
|
||||
|
||||
ep = seiscomp.datamodel.EventParameters()
|
||||
evt_cloned = seiscomp.datamodel.Event.Cast(evt.clone())
|
||||
ep.add(evt_cloned)
|
||||
|
||||
summary = self.getSummary(now, org, prefmag)
|
||||
|
||||
if fm:
|
||||
ep.add(fm)
|
||||
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
|
||||
|
||||
# Load focal mechainsm references
|
||||
if evt.focalMechanismReferenceCount() == 0:
|
||||
self.query().loadFocalMechanismReferences(evt)
|
||||
|
||||
# Load moment tensors
|
||||
if fm.momentTensorCount() == 0:
|
||||
self.query().loadMomentTensors(fm)
|
||||
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
|
||||
|
||||
# Copy focal mechanism reference
|
||||
fm_ref = evt.focalMechanismReference(
|
||||
seiscomp.datamodel.FocalMechanismReferenceIndex(fm.publicID()))
|
||||
if fm_ref:
|
||||
fm_ref_cloned = seiscomp.datamodel.FocalMechanismReference.Cast(
|
||||
fm_ref.clone())
|
||||
if fm_ref_cloned is None:
|
||||
fm_ref_cloned = seiscomp.datamodel.FocalMechanismReference(
|
||||
fm.publicID())
|
||||
evt_cloned.add(fm_ref_cloned)
|
||||
|
||||
nmt = fm.momentTensorCount()
|
||||
for i in range(nmt):
|
||||
mt = fm.momentTensor(i)
|
||||
if not mt.derivedOriginID():
|
||||
continue
|
||||
|
||||
# Origin already added
|
||||
if ep.findOrigin(mt.derivedOriginID()) is not None:
|
||||
continue
|
||||
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
|
||||
wasEnabled)
|
||||
derivedOrigin = self._cache.get(
|
||||
seiscomp.datamodel.Origin, mt.derivedOriginID())
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
|
||||
|
||||
if derivedOrigin is None:
|
||||
seiscomp.logging.warning(
|
||||
"derived origin for MT %s not found" % mt.derivedOriginID())
|
||||
continue
|
||||
|
||||
# Origin has been read from database -> read all childs
|
||||
if not self._cache.cached():
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
|
||||
wasEnabled)
|
||||
self.query().load(derivedOrigin)
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
|
||||
False)
|
||||
|
||||
# Add it to the event parameters
|
||||
ep.add(derivedOrigin)
|
||||
|
||||
if org:
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
|
||||
|
||||
# Load magnitudes
|
||||
if org.magnitudeCount() == 0:
|
||||
self.query().loadMagnitudes(org)
|
||||
|
||||
if org.stationMagnitudeCount() == 0:
|
||||
self.query().loadStationMagnitudes(org)
|
||||
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
|
||||
|
||||
# Copy event comments
|
||||
ncmts = evt.commentCount()
|
||||
for i in range(ncmts):
|
||||
cmt_cloned = seiscomp.datamodel.Comment.Cast(
|
||||
evt.comment(i).clone())
|
||||
evt_cloned.add(cmt_cloned)
|
||||
|
||||
# Copy origin references
|
||||
org_ref = evt.originReference(
|
||||
seiscomp.datamodel.OriginReferenceIndex(org.publicID()))
|
||||
if org_ref:
|
||||
org_ref_cloned = seiscomp.datamodel.OriginReference.Cast(
|
||||
org_ref.clone())
|
||||
if org_ref_cloned is None:
|
||||
org_ref_cloned = seiscomp.datamodel.OriginReference(
|
||||
org.publicID())
|
||||
evt_cloned.add(org_ref_cloned)
|
||||
|
||||
# Copy event descriptions
|
||||
for i in range(evt.eventDescriptionCount()):
|
||||
ed_cloned = seiscomp.datamodel.EventDescription.Cast(
|
||||
evt.eventDescription(i).clone())
|
||||
evt_cloned.add(ed_cloned)
|
||||
|
||||
org_cloned = seiscomp.datamodel.Origin.Cast(org.clone())
|
||||
ep.add(org_cloned)
|
||||
|
||||
# Copy origin comments
|
||||
ncmts = org.commentCount()
|
||||
for i in range(ncmts):
|
||||
cmt_cloned = seiscomp.datamodel.Comment.Cast(
|
||||
org.comment(i).clone())
|
||||
org_cloned.add(cmt_cloned)
|
||||
|
||||
# Copy arrivals
|
||||
narr = org.arrivalCount()
|
||||
for i in range(narr):
|
||||
arr_cloned = seiscomp.datamodel.Arrival.Cast(
|
||||
org.arrival(i).clone())
|
||||
org_cloned.add(arr_cloned)
|
||||
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
|
||||
wasEnabled)
|
||||
pick = self._cache.get(
|
||||
seiscomp.datamodel.Pick, arr_cloned.pickID())
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
|
||||
|
||||
if pick:
|
||||
pick_cloned = seiscomp.datamodel.Pick.Cast(pick.clone())
|
||||
ep.add(pick_cloned)
|
||||
|
||||
# Copy network magnitudes
|
||||
nmag = org.magnitudeCount()
|
||||
for i in range(nmag):
|
||||
mag = org.magnitude(i)
|
||||
|
||||
mag_cloned = seiscomp.datamodel.Magnitude.Cast(mag.clone())
|
||||
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
|
||||
wasEnabled)
|
||||
if mag.stationMagnitudeContributionCount() == 0:
|
||||
self.query().loadStationMagnitudeContributions(mag)
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
|
||||
|
||||
# Copy magnitude references
|
||||
nmagref = mag.stationMagnitudeContributionCount()
|
||||
for j in range(nmagref):
|
||||
mag_ref_cloned = seiscomp.datamodel.StationMagnitudeContribution.Cast(
|
||||
mag.stationMagnitudeContribution(j).clone())
|
||||
mag_cloned.add(mag_ref_cloned)
|
||||
|
||||
org_cloned.add(mag_cloned)
|
||||
|
||||
# Copy station magnitudes and station amplitudes
|
||||
smag = org.stationMagnitudeCount()
|
||||
amp_map = dict()
|
||||
for i in range(smag):
|
||||
mag_cloned = seiscomp.datamodel.StationMagnitude.Cast(
|
||||
org.stationMagnitude(i).clone())
|
||||
org_cloned.add(mag_cloned)
|
||||
if (mag_cloned.amplitudeID() in amp_map) == False:
|
||||
amp_map[mag_cloned.amplitudeID()] = True
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
|
||||
wasEnabled)
|
||||
amp = self._cache.get(
|
||||
seiscomp.datamodel.Amplitude, mag_cloned.amplitudeID())
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(
|
||||
False)
|
||||
if amp:
|
||||
amp_cloned = seiscomp.datamodel.Amplitude.Cast(
|
||||
amp.clone())
|
||||
ep.add(amp_cloned)
|
||||
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
|
||||
|
||||
# archive.create(event.publicID() + )
|
||||
ar = seiscomp.io.XMLArchive()
|
||||
ar.setFormattedOutput(True)
|
||||
|
||||
if self._directory is None:
|
||||
sys.stdout.write("#<\n")
|
||||
ar.create("-")
|
||||
ar.writeObject(ep)
|
||||
ar.close()
|
||||
sys.stdout.write("#>\n")
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
# Use created time to look up the proper directory
|
||||
try:
|
||||
arNow = evt.creationInfo().creationTime().get()
|
||||
# Otherwise use now (in case that event.created has not been set
|
||||
# which is always valid within the SC3 distribution
|
||||
except:
|
||||
arNow = now.get()
|
||||
|
||||
directory = self._directory + \
|
||||
"/".join(["%.2d" % i for i in arNow[1:4]]) + \
|
||||
"/" + evt.publicID() + "/"
|
||||
if directory != self._currentDirectory:
|
||||
if createDirectory(directory) == False:
|
||||
seiscomp.logging.error(
|
||||
"Unable to create directory %s" % directory)
|
||||
return
|
||||
|
||||
self._currentDirectory = directory
|
||||
# self.writeLog(self._currentDirectory + evt.publicID(), "#<\n" + txt + "#>\n")
|
||||
#self.writeLog(self._currentDirectory + evt.publicID() + ".last", txt, "w")
|
||||
ar.create(self._currentDirectory + self.convertID(evt.publicID()) + "." + ("%06d" %
|
||||
self.eventProgress(evt.publicID(), directory)) + ".xml" + self._revisionFileExt)
|
||||
ar.setCompression(True)
|
||||
if self._useGZIP:
|
||||
ar.setCompressionMethod(seiscomp.io.XMLArchive.GZIP)
|
||||
ar.writeObject(ep)
|
||||
ar.close()
|
||||
# Write last file to root
|
||||
ar.create(self._directory + "last.xml" + self._revisionFileExt)
|
||||
ar.setCompression(True)
|
||||
if self._useGZIP:
|
||||
ar.setCompressionMethod(seiscomp.io.XMLArchive.GZIP)
|
||||
ar.writeObject(ep)
|
||||
ar.close()
|
||||
# Write last xml
|
||||
ar.create(self._currentDirectory +
|
||||
self.convertID(evt.publicID()) + ".last.xml")
|
||||
ar.setCompression(False)
|
||||
ar.writeObject(ep)
|
||||
ar.close()
|
||||
self.writeLog(self._currentDirectory + self.convertID(evt.publicID()) + ".summary",
|
||||
"|".join(summary), "a",
|
||||
"# Layout: Timestamp, +OT (minutes, decimal), Latitude, Longitude, Depth, PhaseCount, MagType, Magnitude, MagCount")
|
||||
|
||||
del ep
|
||||
|
||||
def convertID(self, id):
    """Return *id* with every '/' replaced by '_'.

    Event public IDs may contain slashes which are not usable in file
    names; this produces a filesystem-safe variant.
    """
    # A plain str.replace is sufficient here; compiling a regular
    # expression for a fixed single-character substitution is overkill.
    return id.replace('/', '_')
|
||||
|
||||
def writeLog(self, file, text, mode="a", header=None):
    """Append *text* (plus newline) to *file*.

    If *header* is given it is written first, but only when the file is
    empty. On failure an error is logged instead of raising.

    Note: the original code checked ``if of:`` after ``open()`` — that
    branch was dead code, since ``open()`` raises on failure and never
    returns a falsy object, so the intended error logging never ran.
    """
    try:
        with open(file, mode) as of:
            # tell() is 0 only for an empty file (append mode seeks to
            # the end on open), so the header is written exactly once
            if of.tell() == 0 and header is not None:
                of.write(header + "\n")
            of.write(text + "\n")
    except OSError:
        seiscomp.logging.error("Unable to write file: %s" % file)
|
||||
|
||||
def objectAboutToPop(self, obj):
    """Cache-eviction hook: remove the lookup-table entries that refer
    to *obj* before it is dropped from the object cache.

    Handles Event, Origin and Magnitude objects; anything else is
    ignored. Any unexpected error prints a traceback and terminates.
    """
    try:
        evt = seiscomp.datamodel.Event.Cast(obj)
        if evt:
            try:
                # NOTE(review): all five pops share one try block — if an
                # early key is missing, the later pops are skipped too,
                # leaving partial entries behind. Confirm this
                # all-or-nothing cleanup is intended.
                self._orgToEvent.pop(evt.preferredOriginID())
                self._eventToOrg.pop(evt.publicID())

                self._magToEvent.pop(evt.preferredMagnitudeID())
                self._eventToMag.pop(evt.publicID())

                self._eventProgress.pop(evt.publicID())
                return
            except:
                pass

        org = seiscomp.datamodel.Origin.Cast(obj)
        if org:
            try:
                # drop the origin -> event association, if registered
                self._orgToEvent.pop(org.publicID())
            except:
                pass
            return

        mag = seiscomp.datamodel.Magnitude.Cast(obj)
        if mag:
            try:
                # drop the magnitude -> event association, if registered
                self._magToEvent.pop(mag.publicID())
            except:
                pass
            return
    except:
        # unexpected failure: dump the traceback and terminate the app
        info = traceback.format_exception(*sys.exc_info())
        for i in info:
            sys.stderr.write(i)
        sys.exit(-1)
|
||||
|
||||
def eventProgress(self, evtID, directory):
    """Return the next revision number for files of event *evtID*.

    Scans *directory* (expected to end with a path separator) for files
    named ``<evtID>.<number>[.suffix]``, returns max(number)+1 and
    caches the result in ``self._eventProgress``.
    """
    # The progress is already stored
    if evtID in self._eventProgress:
        return self._eventProgress[evtID]

    # Find the maximum file counter used so far
    prefixLen = len(evtID + '.')
    maxid = -1
    for name in os.listdir(directory):
        # directory is expected to carry a trailing separator
        if not os.path.isfile(directory + name):
            continue
        # strip the "<evtID>." prefix, then cut at the next dot
        fid = name[prefixLen:]
        sep = fid.find('.')
        if sep != -1:
            fid = fid[:sep]
        try:
            nid = int(fid)
        except ValueError:
            # not a revision file, ignore
            continue
        if nid > maxid:
            maxid = nid

    maxid += 1
    self._eventProgress[evtID] = maxid
    return maxid
|
||||
|
||||
def advanceEventProgress(self, evtID):
    """Increment the revision counter of *evtID*; no-op if unknown.

    Only a missing key is an expected condition, so catch KeyError
    specifically instead of a bare except that would hide real bugs.
    """
    try:
        self._eventProgress[evtID] += 1
    except KeyError:
        pass
|
||||
|
||||
def addObject(self, parentID, object):
    """Handle a newly added object from the messaging system.

    A new Event starts a history entry; Magnitudes, Origins, Picks and
    Amplitudes are only fed into the cache for fast access later in
    case they become preferred entities. Any unexpected error prints a
    traceback and terminates.
    """
    try:
        obj = seiscomp.datamodel.Event.Cast(object)
        if obj:
            self._cache.feed(obj)
            self._eventProgress[obj.publicID()] = 0
            self.printEvent(obj, True)
            self.updateCache(obj)
            return

        # New Magnitudes, Origins, Picks and Amplitudes are not
        # important for the history update but are fed into the cache
        # to access them faster later on in case they will become
        # preferred entities. The former copy-pasted Cast/feed blocks
        # are collapsed into a single loop.
        for cls in (seiscomp.datamodel.Magnitude,
                    seiscomp.datamodel.Origin,
                    seiscomp.datamodel.Pick,
                    seiscomp.datamodel.Amplitude):
            obj = cls.Cast(object)
            if obj:
                self._cache.feed(obj)
                return

    except:
        # unexpected failure: dump the traceback and terminate the app
        info = traceback.format_exception(*sys.exc_info())
        for i in info:
            sys.stderr.write(i)
        sys.exit(-1)
|
||||
|
||||
def updateObject(self, parentID, object):
    """Handle object updates from the messaging system.

    Event updates are printed directly. Magnitude/Origin updates are
    only relevant when they belong to an event as preferred entity;
    the association is looked up in the in-memory maps first and, on a
    cache miss (the bare except below), fetched from the database and
    memoized — even a negative result (None) is stored to avoid
    repeated queries.
    """
    try:
        obj = seiscomp.datamodel.Event.Cast(object)
        if obj:
            self._cache.feed(obj)
            self.printEvent(obj, False)
            self.updateCache(obj)
            return

        # Updates of a Magnitude are only imported when it is
        # the preferred one.
        obj = seiscomp.datamodel.Magnitude.Cast(object)
        if obj:
            try:
                # cache hit: raises KeyError on unknown magnitude ID
                evtID = self._magToEvent[obj.publicID()]
                if evtID:
                    self._cache.feed(obj)
                    evt = self._cache.get(seiscomp.datamodel.Event, evtID)
                    if evt:
                        self.printEvent(evt, False)
                    else:
                        sys.stderr.write("Unable to fetch event for ID '%s' while update of magnitude '%s'\n" % (
                            evtID, obj.publicID()))
                else:
                    # Magnitude has not been associated to an event yet
                    pass
            except:
                # Search the corresponding event from the database
                evt = self.query().getEventByPreferredMagnitudeID(obj.publicID())
                # Associate the event (even if None) with the magnitude ID
                if evt:
                    self._magToEvent[obj.publicID()] = evt.publicID()
                    self._cache.feed(obj)
                    self.printEvent(evt, False)
                else:
                    self._magToEvent[obj.publicID()] = None
            return

        # Usually we do not update origins. To have it complete,
        # this case will be supported as well
        obj = seiscomp.datamodel.Origin.Cast(object)
        if obj:
            try:
                # cache hit: raises KeyError on unknown origin ID
                evtID = self._orgToEvent[obj.publicID()]
                if evtID:
                    self._cache.feed(obj)
                    evt = self._cache.get(seiscomp.datamodel.Event, evtID)
                    if evt:
                        self.printEvent(evt, False)
                    else:
                        sys.stderr.write("Unable to fetch event for ID '%s' while update of origin '%s'\n" % (
                            evtID, obj.publicID()))
                else:
                    # Origin has not been associated to an event yet
                    pass
            except:
                # Search the corresponding event from the database
                evt = self.query().getEvent(obj.publicID())
                if evt:
                    # only keep it when this origin is the preferred one
                    if evt.preferredOriginID() != obj.publicID():
                        evt = None

                # Associate the event (even if None) with the origin ID
                if evt:
                    self._orgToEvent[obj.publicID()] = evt.publicID()
                    self._cache.feed(obj)
                    self.printEvent(evt, False)
                else:
                    self._orgToEvent[obj.publicID()] = None
            return

        return

    except:
        # unexpected failure: dump the traceback and terminate the app
        info = traceback.format_exception(*sys.exc_info())
        for i in info:
            sys.stderr.write(i)
        sys.exit(-1)
|
||||
|
||||
def updateCache(self, evt):
    """Synchronize the preferred-origin/-magnitude lookup tables.

    When *evt*'s preferred origin or magnitude changed, the stale
    reverse mapping is removed; then the current preferred IDs are
    bound in both directions. Only a missing key is expected here, so
    the former bare excepts are narrowed to KeyError.
    """
    # Event-Origin update: drop a stale origin -> event mapping
    try:
        orgID = self._eventToOrg[evt.publicID()]
        if orgID != evt.preferredOriginID():
            self._orgToEvent.pop(orgID)
    except KeyError:
        # origin not yet registered
        pass

    # Bind the current preferred origin ID to the event and vice versa
    self._orgToEvent[evt.preferredOriginID()] = evt.publicID()
    self._eventToOrg[evt.publicID()] = evt.preferredOriginID()

    # Event-Magnitude update: drop a stale magnitude -> event mapping
    try:
        magID = self._eventToMag[evt.publicID()]
        if magID != evt.preferredMagnitudeID():
            self._magToEvent.pop(magID)
    except KeyError:
        # not yet registered
        pass

    # Bind the current preferred magnitude ID to the event and vice versa
    self._magToEvent[evt.preferredMagnitudeID()] = evt.publicID()
    self._eventToMag[evt.publicID()] = evt.preferredMagnitudeID()
|
||||
|
||||
|
||||
# Script entry point: run the EventHistory application and propagate
# its return code to the shell.
app = EventHistory(len(sys.argv), sys.argv)
sys.exit(app())
|
@ -0,0 +1,197 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import sys
|
||||
import seiscomp.core
|
||||
import seiscomp.client
|
||||
import seiscomp.datamodel
|
||||
import seiscomp.logging
|
||||
|
||||
|
||||
def _parseTime(timestring):
    """Parse *timestring* into a seiscomp.core.Time.

    The accepted formats are tried in order: "%F %T", "%FT%T" and
    "%FT%TZ". Returns None when none of them matches.
    """
    parsed = seiscomp.core.Time()
    for fmt in ("%F %T", "%FT%T", "%FT%TZ"):
        if parsed.fromString(timestring, fmt):
            return parsed
    return None
|
||||
|
||||
|
||||
class EventList(seiscomp.client.Application):
    """Command line application listing event IDs in a time window.

    Reads events from the database (no messaging) and prints their
    public IDs — optionally with the preferred origin ID — to stdout.
    """

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        # database-only tool: no messaging, no daemon mode
        self.setMessagingEnabled(False)
        self.setDatabaseEnabled(True, False)
        self.setDaemonEnabled(False)

        self._startTime = None          # lower bound of the search window
        self._endTime = None            # upper bound of the search window
        self.hours = None               # alternative window: hours before now
        self._delimiter = None          # separator between printed event IDs
        self._modifiedAfterTime = None  # optional modification-time filter
        self._preferredOrigin = False   # also print preferred origin ID

    def createCommandLineDescription(self):
        """Register the 'Events' and 'Output' command line options."""
        self.commandline().addGroup("Events")
        self.commandline().addStringOption("Events", "begin",
                                           "Specify the lower bound of the "
                                           "time interval.")
        self.commandline().addStringOption("Events", "end",
                                           "Specify the upper bound of the "
                                           "time interval.")
        self.commandline().addStringOption("Events", "hours",
                                           "Start searching given hours before"
                                           " now. If set, --begin and --end "
                                           "are ignored.")
        self.commandline().addStringOption("Events", "modified-after",
                                           "Select events modified after the "
                                           "specified time.")

        self.commandline().addGroup("Output")
        self.commandline().addStringOption("Output", "delimiter,D",
                                           "Specify the delimiter of the "
                                           "resulting event IDs. "
                                           "Default: '\\n')")
        self.commandline().addOption("Output", "preferred-origin,p",
                                     "Print the ID of the preferred origin "
                                     "along with the event ID.")
        return True

    def init(self):
        """Validate options and derive the search time window.

        --hours takes precedence over --begin/--end. Returns False on
        any malformed time string.
        """
        if not seiscomp.client.Application.init(self):
            return False

        # optionString raises RuntimeError when the option is unset
        try:
            self.hours = float(self.commandline().optionString("hours"))
        except RuntimeError:
            pass

        end = "2500-01-01T00:00:00Z"
        if self.hours is None:
            try:
                start = self.commandline().optionString("begin")
            except RuntimeError:
                start = "1900-01-01T00:00:00Z"

            self._startTime = _parseTime(start)
            if self._startTime is None:
                seiscomp.logging.error("Wrong 'begin' format '%s'" % start)
                return False
            seiscomp.logging.debug("Setting start to %s"
                                   % self._startTime.toString("%FT%TZ"))

            try:
                end = self.commandline().optionString("end")
            except RuntimeError:
                pass

            self._endTime = _parseTime(end)
            if self._endTime is None:
                seiscomp.logging.error("Wrong 'end' format '%s'" % end)
                return False
            seiscomp.logging.debug("Setting end to %s"
                                   % self._endTime.toString("%FT%TZ"))
        else:
            seiscomp.logging.debug("Time window set by hours option: ignoring "
                                   "all other time parameters")
            secs = self.hours*3600
            # 596523 hours is the TimeSpan overflow limit (2^31 seconds)
            maxSecs = 596523 * 3600
            if secs > maxSecs:
                # NOTE(review): "exceeeded" typo in this user-facing
                # message — left unchanged here
                seiscomp.logging.error("Maximum hours exceeeded. Maximum is %i" % (maxSecs / 3600))
                return False

            self._startTime = seiscomp.core.Time.UTC() - seiscomp.core.TimeSpan(secs)
            self._endTime = _parseTime(end)

        try:
            self._delimiter = self.commandline().optionString("delimiter")
        except RuntimeError:
            self._delimiter = "\n"

        try:
            modifiedAfter = self.commandline().optionString("modified-after")
            self._modifiedAfterTime = _parseTime(modifiedAfter)
            if self._modifiedAfterTime is None:
                seiscomp.logging.error("Wrong 'modified-after' format '%s'"
                                       % modifiedAfter)
                return False
            seiscomp.logging.debug(
                "Setting 'modified-after' time to %s" %
                self._modifiedAfterTime.toString("%FT%TZ"))
        except RuntimeError:
            pass

        try:
            self._preferredOrigin = self.commandline().hasOption("preferred-origin")
        except RuntimeError:
            pass

        return True

    def printUsage(self):
        """Print the tool synopsis, the generic options and examples."""
        print('''Usage:
  scevtls [options]

List event IDs available in a given time range and print to stdout.''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Print all event IDs from year 2022 and thereafter
  scevtls -d mysql://sysop:sysop@localhost/seiscomp --begin "2022-01-01 00:00:00"
''')

    def run(self):
        """Query events in the window, apply filters and print the IDs."""
        out = []
        seiscomp.logging.debug("Search interval: %s - %s" %
                               (self._startTime, self._endTime))
        for obj in self.query().getEvents(self._startTime, self._endTime):
            evt = seiscomp.datamodel.Event.Cast(obj)
            if not evt:
                continue

            if self._modifiedAfterTime is not None:
                try:
                    # ValueError: creationInfo/modificationTime not set
                    if evt.creationInfo().modificationTime() < self._modifiedAfterTime:
                        continue
                except ValueError:
                    continue

            outputString = evt.publicID()
            if self._preferredOrigin:
                try:
                    outputString += " " + evt.preferredOriginID()
                except ValueError:
                    outputString += " none"

            out.append(outputString)

        sys.stdout.write("%s\n" % self._delimiter.join(out))

        return True
|
||||
|
||||
|
||||
def main():
    """Instantiate and run the EventList application.

    Returns the application's exit code so callers (and the __main__
    guard) can propagate it to the shell. The original discarded the
    result of ``app()``, so the process always exited with status 0
    even on failure.
    """
    app = EventList(len(sys.argv), sys.argv)
    return app()


if __name__ == "__main__":
    sys.exit(main())
|
@ -0,0 +1,432 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import sys
|
||||
|
||||
from seiscomp import client, core, datamodel, io
|
||||
|
||||
|
||||
class EventStreams(client.Application):
    """Extract stream lists and padded time windows from an event.

    Picks are read either from the database (--event) or from an XML
    file (--input); one line per unique net/sta/loc/cha combination is
    printed in plain, capstool or FDSNWS POST format.
    """

    def __init__(self, argc, argv):
        client.Application.__init__(self, argc, argv)

        # database-only tool: no messaging, no daemon mode
        self.setMessagingEnabled(False)
        self.setDatabaseEnabled(True, False)
        self.setDaemonEnabled(False)

        self.eventID = None
        self.inputFile = None
        self.inputFormat = "xml"
        # margin in seconds: [before] or [before, after]
        self.margin = [300]

        self.allNetworks = True
        self.allStations = True
        self.allLocations = True
        self.allStreams = True
        self.allComponents = True

        # filter
        self.network = None
        self.station = None

        self.streams = []

        # output format
        self.caps = False
        self.fdsnws = False

    def createCommandLineDescription(self):
        """Register the 'Input' and 'Dump' command line options."""
        self.commandline().addGroup("Input")
        self.commandline().addStringOption(
            "Input", "input,i",
            "read event from XML file instead of database. Use '-' to read "
            "from stdin.")
        self.commandline().addStringOption(
            "Input", "format,f",
            "input format to use (xml [default], zxml (zipped xml), binary). "
            "Only relevant with --input.")

        self.commandline().addGroup("Dump")
        self.commandline().addStringOption("Dump", "event,E", "event id")
        self.commandline().addStringOption(
            "Dump", "margin,m",
            "time margin around the picked time window, default is 300. Added "
            "before the first and after the last pick, respectively. Use 2 "
            "comma-separted values (before,after) for asymmetric margins, e.g. "
            "-m 120,300.")
        self.commandline().addStringOption(
            "Dump", "streams,S",
            "comma separated list of streams per station to add, e.g. BH,SH,HH")
        self.commandline().addOption(
            "Dump", "all-streams",
            "dump all streams. If unused, just streams with picks are dumped.")
        self.commandline().addIntOption(
            "Dump", "all-components,C",
            "all components or just the picked ones (0). Default is 1")
        self.commandline().addIntOption(
            "Dump", "all-locations,L",
            "all locations or just the picked ones (0). Default is 1")
        self.commandline().addOption(
            "Dump", "all-stations",
            "dump all stations from the same network. If unused, just stations "
            "with picks are dumped.")
        self.commandline().addOption(
            "Dump", "all-networks",
            "dump all networks. If unused, just networks with picks are dumped."
            " This option implies all-stations, all-locations, all-streams, "
            "all-components and will only provide the time window.")
        self.commandline().addOption(
            "Dump", "resolve-wildcards,R",
            "if all components are used, use inventory to resolve stream "
            "components instead of using '?' (important when Arclink should be "
            "used)")
        self.commandline().addStringOption(
            "Dump", "net-sta", "Filter streams by network code or network and "
            "station code. Format: NET or NET.STA")
        self.commandline().addOption(
            "Dump", "caps",
            "dump in capstool format (Common Acquisition Protocol Server by "
            "gempa GmbH)")
        self.commandline().addOption(
            "Dump", "fdsnws",
            "dump in FDSN dataselect webservice POST format")
        return True

    def validateParameters(self):
        """Enable inventory loading for -R; disable DB when reading XML."""
        if not client.Application.validateParameters(self):
            return False

        if self.commandline().hasOption("resolve-wildcards"):
            self.setLoadStationsEnabled(True)

        try:
            self.inputFile = self.commandline().optionString("input")
            # with a file as source the database is not needed
            self.setDatabaseEnabled(False, False)
        except BaseException:
            pass

        return True

    def init(self):
        """Evaluate the command line options into instance attributes.

        Raises ValueError when neither --event nor --input is given.
        """
        if not client.Application.init(self):
            return False

        # option accessors raise when the option is unset; the broad
        # BaseException handlers keep the defaults in that case
        try:
            self.inputFormat = self.commandline().optionString("format")
        except BaseException:
            pass

        try:
            self.eventID = self.commandline().optionString("event")
        except BaseException:
            if not self.inputFile:
                raise ValueError("An eventID is mandatory if no input file is "
                                 "specified")

        try:
            self.margin = self.commandline().optionString("margin").split(",")
        except BaseException:
            pass

        try:
            self.streams = self.commandline().optionString("streams").split(",")
        except BaseException:
            pass

        try:
            self.allComponents = self.commandline().optionInt("all-components") != 0
        except BaseException:
            pass

        try:
            self.allLocations = self.commandline().optionInt("all-locations") != 0
        except BaseException:
            pass

        self.allStreams = self.commandline().hasOption("all-streams")
        self.allStations = self.commandline().hasOption("all-stations")
        self.allNetworks = self.commandline().hasOption("all-networks")

        try:
            networkStation = self.commandline().optionString("net-sta")
        except RuntimeError:
            networkStation = None

        if networkStation:
            try:
                self.network = networkStation.split('.')[0]
            except IndexError:
                print("Error in network code '{}': Use '--net-sta' with "
                      "format NET or NET.STA".format(networkStation), file=sys.stderr)
                return False

            # the station part is optional (NET alone is allowed)
            try:
                self.station = networkStation.split('.')[1]
            except IndexError:
                pass

        self.caps = self.commandline().hasOption("caps")
        self.fdsnws = self.commandline().hasOption("fdsnws")

        return True

    def printUsage(self):
        """Print the tool synopsis, the generic options and examples."""
        print('''Usage:
  scevtstreams [options]

Extract stream information and time windows from an event''')

        client.Application.printUsage(self)

        print('''Examples:
Get the time windows for an event in the database:
  scevtstreams -E gfz2012abcd -d mysql://sysop:sysop@localhost/seiscomp

Create lists compatible with fdsnws:
  scevtstreams -E gfz2012abcd -i event.xml -m 120,500 --fdsnws
''')

    def run(self):
        """Collect picks, derive the padded time window and print one
        line per unique stream in the selected output format."""
        resolveWildcards = self.commandline().hasOption("resolve-wildcards")

        picks = []

        # read picks from input file
        if self.inputFile:
            picks = self.readXML()
            if not picks:
                raise ValueError("Could not find picks in input file")

        # read picks from database
        else:
            for obj in self.query().getEventPicks(self.eventID):
                pick = datamodel.Pick.Cast(obj)
                if pick is None:
                    continue
                picks.append(pick)

            if not picks:
                raise ValueError("Could not find picks for event {} in "
                                 "database".format(self.eventID))

        # filter picks by the optional --net-sta selection
        pickFiltered = []
        if self.network:
            for pick in picks:
                if pick.waveformID().networkCode() != self.network:
                    continue
                if self.station and self.station != pick.waveformID().stationCode():
                    continue
                pickFiltered.append(pick)

            picks = pickFiltered

            if not picks:
                raise ValueError("All picks filtered out")

        # calculate minimum and maximum pick time
        minTime = None
        maxTime = None
        for pick in picks:
            if minTime is None or minTime > pick.time().value():
                minTime = pick.time().value()

            if maxTime is None or maxTime < pick.time().value():
                maxTime = pick.time().value()

        # add time margin(s), no need for None check since pick time is
        # mandatory and at least on pick exists
        minTime = minTime - core.TimeSpan(float(self.margin[0]))
        maxTime = maxTime + core.TimeSpan(float(self.margin[-1]))

        # convert times to string dependend on requested output format
        if self.caps:
            timeFMT = "%Y,%m,%d,%H,%M,%S"
        elif self.fdsnws:
            timeFMT = "%FT%T"
        else:
            timeFMT = "%F %T"
        minTime = minTime.toString(timeFMT)
        maxTime = maxTime.toString(timeFMT)

        inv = client.Inventory.Instance().inventory()

        # a set removes duplicate stream lines automatically
        lines = set()
        for pick in picks:
            net = pick.waveformID().networkCode()
            station = pick.waveformID().stationCode()
            loc = pick.waveformID().locationCode()
            streams = [pick.waveformID().channelCode()]
            rawStream = streams[0][:2]

            if self.allComponents:
                if resolveWildcards:
                    # use the inventory to expand to the actual
                    # three-component channel codes
                    iloc = datamodel.getSensorLocation(inv, pick)
                    if iloc:
                        tc = datamodel.ThreeComponents()
                        datamodel.getThreeComponents(
                            tc, iloc, rawStream, pick.time().value())
                        streams = []
                        if tc.vertical():
                            streams.append(tc.vertical().code())
                        if tc.firstHorizontal():
                            streams.append(tc.firstHorizontal().code())
                        if tc.secondHorizontal():
                            streams.append(tc.secondHorizontal().code())
                else:
                    streams = [rawStream + "?"]

            if self.allLocations:
                loc = "*"

            if self.allStations:
                station = "*"

            if self.allNetworks:
                net = "*"
                station = "*"
                loc = "*"

            # FDSNWS requires empty location to be encoded by 2 dashes
            if not loc and self.fdsnws:
                loc = "--"

            # line format
            if self.caps:
                lineFMT = "{0} {1} {2} {3} {4} {5}"
            elif self.fdsnws:
                lineFMT = "{2} {3} {4} {5} {0} {1}"
            else:
                lineFMT = "{0};{1};{2}.{3}.{4}.{5}"

            for s in streams:
                if self.allStreams or self.allNetworks:
                    s = "*"

                lines.add(lineFMT.format(
                    minTime, maxTime, net, station, loc, s))

            # additional stream types requested via --streams
            for s in self.streams:
                if s == rawStream:
                    continue

                if self.allStreams or self.allNetworks:
                    s = "*"

                lines.add(lineFMT.format(
                    minTime, maxTime, net, station, loc, s + streams[0][2]))

        for line in sorted(lines):
            print(line, file=sys.stdout)

        return True

    def readXML(self):
        """Read event parameters from self.inputFile and return the
        picks referenced (via arrivals) by the relevant origin(s).

        Raises TypeError/IOError/ValueError on format or content
        problems.
        """
        if self.inputFormat == "xml":
            ar = io.XMLArchive()
        elif self.inputFormat == "zxml":
            ar = io.XMLArchive()
            ar.setCompression(True)
        elif self.inputFormat == "binary":
            ar = io.VBinaryArchive()
        else:
            raise TypeError("unknown input format '{}'".format(
                self.inputFormat))

        if not ar.open(self.inputFile):
            raise IOError("unable to open input file")

        obj = ar.readObject()
        if obj is None:
            raise TypeError("invalid input file format")

        ep = datamodel.EventParameters.Cast(obj)
        if ep is None:
            raise ValueError("no event parameters found in input file")

        # we require at least one origin which references to picks via arrivals
        if ep.originCount() == 0:
            raise ValueError("no origin found in input file")

        originIDs = []

        # search for a specific event id
        if self.eventID:
            ev = datamodel.Event.Find(self.eventID)
            if ev:
                originIDs = [ev.originReference(i).originID() \
                             for i in range(ev.originReferenceCount())]
            else:
                raise ValueError("event id {} not found in input file".format(
                    self.eventID))

        # use first event/origin if no id was specified
        else:
            # no event, use first available origin
            if ep.eventCount() == 0:
                if ep.originCount() > 1:
                    print("WARNING: Input file contains no event but more than "
                          "1 origin. Considering only first origin",
                          file=sys.stderr)
                originIDs.append(ep.origin(0).publicID())

            # use origin references of first available event
            else:
                if ep.eventCount() > 1:
                    print("WARNING: Input file contains more than 1 event. "
                          "Considering only first event", file=sys.stderr)
                ev = ep.event(0)
                originIDs = [ev.originReference(i).originID() \
                             for i in range(ev.originReferenceCount())]

        # collect pickIDs
        pickIDs = set()
        for oID in originIDs:
            o = datamodel.Origin.Find(oID)
            if o is None:
                continue

            for i in range(o.arrivalCount()):
                pickIDs.add(o.arrival(i).pickID())

        # lookup picks
        picks = []
        for pickID in pickIDs:
            pick = datamodel.Pick.Find(pickID)
            if pick:
                picks.append(pick)

        return picks
|
||||
|
||||
|
||||
# Script entry point: configuration/content errors raised by the
# application are reported as a short message instead of a traceback.
if __name__ == '__main__':
    try:
        app = EventStreams(len(sys.argv), sys.argv)
        sys.exit(app())
    except (ValueError, TypeError) as e:
        print("ERROR: {}".format(e), file=sys.stderr)
        sys.exit(1)
|
@ -0,0 +1,38 @@
|
||||
#!/bin/bash
# Initializes a GIT repository in $SEISCOMP_ROOT and adds important
# configuration files from 'etc' and 'share' directory
#
# Author: Stephan Herrnkind <herrnkind@gempa.de>


# search for SeisComP path
if [ x"$SEISCOMP_ROOT" = x ]; then
    echo "SEISCOMP_ROOT not set"
    exit 1
fi

# search git binary ('command -v' is POSIX; 'which' is not guaranteed)
command -v git > /dev/null
if [ $? -ne 0 ]; then
    echo "git binary not found"
    exit 2
fi

# quote the path: it may contain spaces
cd "$SEISCOMP_ROOT" || exit 3

# initialize git if necessary
[ -d .git ] || git rev-parse --git-dir > /dev/null 2>&1
if [ $? -eq 0 ]; then
    echo "GIT repository in $SEISCOMP_ROOT already initialized"
else
    git init || exit 4
fi

# add configuration files plus selected file types under share/
git add etc
find share -type f -regex \
    ".*\.\(bna\|cfg\|conf\|htaccess\|kml\|py\|sh\|tpl\|tvel\|txt\|xml\)" \
    -execdir git add {} +

echo "files added to GIT, use 'git status' to get an overview and " \
    "'git commit' to commit them"
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,84 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import sys
|
||||
import getopt
|
||||
import seiscomp.io
|
||||
import seiscomp.datamodel
|
||||
|
||||
|
||||
usage = """scml2inv [options] input output=stdout
|
||||
|
||||
Options:
|
||||
-h [ --help ] Produce help message
|
||||
-f Enable formatted XML output
|
||||
"""
|
||||
|
||||
|
||||
def main(argv):
    """Convert a SeisComP inventory XML document.

    Reads the inventory from argv[1] and writes it to argv[2] (stdout
    when omitted). Returns 0 on success, a nonzero code on error.
    """
    formatted = False

    # parse command line options
    try:
        opts, args = getopt.getopt(argv[1:], "hf", ["help"])
    except getopt.error as msg:
        sys.stderr.write("%s\n" % msg)
        sys.stderr.write("for help use --help\n")
        return 1

    for o, a in opts:
        if o in ["-h", "--help"]:
            sys.stderr.write("%s\n" % usage)
            return 1
        elif o in ["-f"]:
            formatted = True

    argv = args
    if len(argv) < 1:
        sys.stderr.write("Missing input file\n")
        return 1

    ar = seiscomp.io.XMLArchive()
    if not ar.open(argv[0]):
        sys.stderr.write("Unable to parse input file: %s\n" % argv[0])
        return 2

    obj = ar.readObject()
    ar.close()

    if obj is None:
        sys.stderr.write("Empty document in %s\n" % argv[0])
        return 3

    inv = seiscomp.datamodel.Inventory.Cast(obj)
    if inv is None:
        sys.stderr.write("No inventory found in %s\n" % argv[0])
        return 4

    # "-" means stdout
    output_file = argv[1] if len(argv) > 1 else "-"

    # report a failure to open the output target instead of silently
    # writing nothing (the original ignored the return value)
    if not ar.create(output_file):
        sys.stderr.write("Unable to create output file: %s\n" % output_file)
        return 5

    ar.setFormattedOutput(formatted)
    ar.writeObject(inv)
    ar.close()

    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s return code to the shell.
if __name__ == "__main__":
    sys.exit(main(sys.argv))
|
@ -0,0 +1,416 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import argparse
|
||||
import seiscomp.core
|
||||
import seiscomp.io
|
||||
|
||||
|
||||
class MyArgumentParser(argparse.ArgumentParser):
    """ArgumentParser that is meant to keep the epilog verbatim."""

    # NOTE(review): argparse never calls a method named "format_epilog"
    # (that hook belongs to optparse); the verbatim epilog here comes from
    # RawDescriptionHelpFormatter instead, so this override appears to be
    # vestigial -- confirm before removing.
    def format_epilog(self):
        """Return the epilog unchanged."""
        return self.epilog
|
||||
|
||||
|
||||
def str2time(timestring):
    """
    Liberally accept many time string formats and convert them to a
    seiscomp.core.Time.

    Separators '-', '/', ':', 'T', 'Z' and whitespace are all accepted.
    Between 3 (date only) and 6 (full date-time) fields are required;
    missing fields are zero-padded. Raises ValueError on malformed input.
    """

    timestring = timestring.strip()
    for c in ["-", "/", ":", "T", "Z"]:
        timestring = timestring.replace(c, " ")
    timestring = timestring.split()

    # explicit validation instead of 'assert', which is stripped under -O
    if not 3 <= len(timestring) <= 6:
        raise ValueError("invalid time string: expected 3 to 6 fields")

    timestring.extend((6 - len(timestring)) * ["0"])
    timestring = " ".join(timestring)
    timeFormat = "%Y %m %d %H %M %S"
    # only parse fractional seconds when they are present
    if timestring.find(".") != -1:
        timeFormat += ".%f"

    time = seiscomp.core.Time()
    time.fromString(timestring, timeFormat)
    return time
|
||||
|
||||
|
||||
def time2str(time):
    """Format a seiscomp.core.Time as 'YYYY-MM-DD hh:mm:ss.fff'."""
    # render with padded microseconds, then truncate to millisecond precision
    rendered = time.toString("%Y-%m-%d %H:%M:%S.%f000000")
    return rendered[:23]
|
||||
|
||||
|
||||
def recordInput(filename=None, datatype=seiscomp.core.Array.INT):
    """
    Simple Record iterator that reads from a file (to be specified by
    filename) or -- if no filename was specified -- reads from standard input
    """

    # the "file" RecordStream reads miniSEED from a local file or stdin
    stream = seiscomp.io.RecordStream.Create("file")
    if not stream:
        raise IOError("failed to create a RecordStream")

    # no filename means stdin; "-" is the conventional alias for it
    if not filename:
        filename = "-"

    if filename == "-":
        print(
            "Waiting for data input from stdin. Use Ctrl + C to interrupt.",
            file=sys.stderr,
        )
    else:
        if not os.path.exists(filename):
            print("Cannot find file {}".format(filename), file=sys.stderr)
            sys.exit()

    if not stream.setSource(filename):
        print(" + failed to assign source file to RecordStream", file=sys.stderr)
        sys.exit()

    # SAVE_RAW keeps the raw record bytes so they can be written out verbatim
    records = seiscomp.io.RecordInput(stream, datatype, seiscomp.core.Record.SAVE_RAW)

    while True:
        try:
            record = next(records)
        except Exception:
            # NOTE(review): terminates the whole process from inside a
            # generator on any read error -- apparently intentional for
            # this CLI tool
            print("Received invalid or no input", file=sys.stderr)
            sys.exit()

        # a None record marks the end of the stream
        if not record:
            return
        yield record
|
||||
|
||||
|
||||
# Default filter window: effectively unbounded.
tmin = str2time("1970-01-01 00:00:00")
tmax = str2time("2500-01-01 00:00:00")
# Default input source ("-" = stdin).
# NOTE(review): ifile appears to be unused below -- confirm before removing.
ifile = "-"

# Help text shown by the argument parser.
description = (
    "Read unsorted and possibly multiplexed miniSEED files. "
    "Sort data by time (multiplexing) and filter the individual "
    "records by time and/or streams. Apply this before playbacks "
    "and waveform archiving."
)

# Verbatim example section appended to --help output.
epilog = (
    "Examples:\n"
    "Read data from multiple files, extract streams by time, sort records by start "
    "time, remove duplicate records\n"
    "  cat f1.mseed f2.mseed f3.mseed |\\\n"
    "  scmssort -v -t '2007-03-28 15:48~2007-03-28 16:18' -u > sorted.mseed\n"
    "\n"
    "Extract streams by time, stream code and sort records by end time\n"
    "  echo CX.PB01..BH? |\\ \n"
    "  scmssort -v -E -t '2007-03-28 15:48~2007-03-28 16:18' -u -l - test.mseed > "
    "sorted.mseed"
)
|
||||
|
||||
|
||||
# Previous optparse-style construction, kept for reference:
# p = MyArgumentParser(
#     usage="\n  %prog [options] [files | < ] > ", description=description, epilog=epilog
# )
p = MyArgumentParser(
    description=description,
    epilog=epilog,
    # RawDescriptionHelpFormatter keeps the example block formatted verbatim
    formatter_class=argparse.RawDescriptionHelpFormatter,
)
p.add_argument(
    "-E",
    "--sort-by-end-time",
    action="store_true",
    help="Sort according to record end time; default is start time.",
)
p.add_argument(
    "-r",
    "--rm",
    action="store_true",
    help="Remove all traces in stream list given by --list instead of keeping them.",
)
p.add_argument(
    "-l",
    "--list",
    action="store",
    help="File with stream list to filter the records. "
    "One stream per line. Instead of a file read the from stdin (-). "
    "Line format: NET.STA.LOC.CHA - wildcards and regular expressions "
    "are considered. Example: CX.*..BH?.",
)
p.add_argument(
    "-t",
    "--time-window",
    action="store",
    help="Specify time window (as one -properly quoted- string). Times "
    "are of course UTC and separated by a tilde '~'.",
)
p.add_argument(
    "-u",
    "--uniqueness",
    action="store_true",
    help="Ensure uniqueness of output, i.e. skip duplicate records.",
)
p.add_argument("-v", "--verbose", action="store_true", help="Run in verbose mode.")

p.add_argument(
    "filenames",
    nargs="+",
    help="Names of input files in miniSEED format.",
)
opt = p.parse_args()
filenames = opt.filenames

# Narrow the global time window when --time-window was given.
if opt.time_window:
    tmin, tmax = list(map(str2time, opt.time_window.split("~")))

if opt.verbose:
    print(
        "Considered time window: %s~%s" % (time2str(tmin), time2str(tmax)),
        file=sys.stderr,
    )

# Optional stream filter file (--list) and keep/remove mode (--rm).
listFile = None
removeStreams = False
if opt.list:
    listFile = opt.list
    print("Considered stream list from: %s" % (listFile), file=sys.stderr)

    if opt.rm:
        removeStreams = True
        print("Removing listed streams", file=sys.stderr)
|
||||
|
||||
|
||||
def _time(record):
    """Sort key for a record: end time with -E, start time otherwise."""
    anchor = record.endTime() if opt.sort_by_end_time else record.startTime()
    return seiscomp.core.Time(anchor)
|
||||
|
||||
|
||||
def _in_time_window(record, tMin, tMax):
|
||||
return record.endTime() >= tMin and record.startTime() <= tMax
|
||||
|
||||
|
||||
def readStreamList(file):
    """Read a stream-ID list (one NET.STA.LOC.CHA entry per line) from a file.

    file: path of the list file, or "-" to read from stdin. Empty lines and
    lines starting with '#' are ignored. Returns the list of stream IDs,
    or an empty list on any error (unreadable file, malformed line).
    """
    streamList = []

    try:
        if file == "-":
            f = sys.stdin
            file = "stdin"
        else:
            # bug fix: open the requested file, not the global 'listFile'
            f = open(file, "r", encoding="utf-8")
    except FileNotFoundError:
        print("%s: error: unable to open" % file, file=sys.stderr)
        return []

    # bug fix: report 1-based line numbers (previously off by one)
    lineNumber = 0
    for line in f:
        lineNumber = lineNumber + 1
        line = line.strip()
        # ignore comments
        if len(line) > 0 and line[0] == "#":
            continue

        if len(line) == 0:
            continue

        toks = line.split(".")
        if len(toks) != 4:
            f.close()
            print(
                "error: %s in line %d has invalid line format, expected "
                "stream list: NET.STA.LOC.CHA - 1 line per stream including "
                "regular expressions" % (file, lineNumber),
                file=sys.stderr,
            )
            return []

        streamList.append(line)

    f.close()

    return streamList
|
||||
|
||||
|
||||
# No file arguments means reading from stdin.
if not filenames:
    filenames = ["-"]

streams = None
if listFile:
    streams = readStreamList(listFile)
    if not streams and not removeStreams:
        print(" + cannot extract data", file=sys.stderr)
        sys.exit()

    if opt.verbose:
        string = " + streams: "

        for stream in streams:
            string += stream + " "
        print("%s" % (string), file=sys.stderr)

    # one combined regex that matches any of the listed stream IDs
    pattern = re.compile("|".join(streams))

# Statistics collected while reading (verbose mode only fills the sets).
readRecords = 0
networks = set()
stations = set()
locations = set()
channels = set()
readStreams = set()
outEnd = None
outStart = None

if filenames:
    first = None
    time_raw = []  # list of (relative sort time, raw record bytes)
    for fileName in filenames:
        if opt.verbose:
            print("Reading file '%s'" % fileName, file=sys.stderr)

        for rec in recordInput(fileName):
            if not rec:
                continue

            # drop records outside the requested time window
            if not _in_time_window(rec, tmin, tmax):
                continue

            raw = rec.raw().str()
            streamCode = "%s.%s.%s.%s" % (
                rec.networkCode(),
                rec.stationCode(),
                rec.locationCode(),
                rec.channelCode(),
            )

            # apply the stream filter in keep or remove mode
            if listFile:
                foundStream = False

                if pattern.match(streamCode):
                    foundStream = True

                if removeStreams:
                    foundStream = not foundStream

                if not foundStream:
                    continue

            # collect statistics for verbosity mode
            if opt.verbose:
                networks.add(rec.networkCode())
                stations.add(rec.stationCode())
                locations.add(rec.locationCode())
                channels.add(rec.channelCode())
                readStreams.add(streamCode)
                readRecords += 1

                start = rec.startTime()
                end = rec.endTime()

                if (outStart is None) or (start < outStart):
                    outStart = seiscomp.core.Time(start)

                if (outEnd is None) or (end > outEnd):
                    outEnd = seiscomp.core.Time(end)

            # sort key relative to the first record
            t = _time(rec)
            if first is None:
                first = t
            t = float(t - first)  # float needs less memory
            time_raw.append((t, raw))

    if opt.verbose:
        print(
            " + %d networks, %d stations, %d sensor locations, "
            "%d channel codes, %d streams, %d records"
            % (
                len(networks),
                len(stations),
                len(locations),
                len(channels),
                len(readStreams),
                readRecords,
            ),
            file=sys.stderr,
        )
        print("Sorting records", file=sys.stderr)

    time_raw.sort()

    if opt.verbose:
        print("Writing output", file=sys.stderr)
    previous = None

    # write raw bytes, so use the binary buffer where available
    out = sys.stdout
    try:
        # needed in Python 3, fails in Python 2
        out = out.buffer
    except AttributeError:
        # assuming this is Python 2, nothing to be done
        pass

    # identical (t, raw) tuples are adjacent after sorting, so duplicates
    # can be detected by comparing with the previous item
    duplicates = 0
    for item in time_raw:
        if item == previous:
            duplicates += 1
            if opt.uniqueness:
                continue

        t, raw = item
        out.write(raw)

        previous = item

    if opt.verbose:
        print("Finished", file=sys.stderr)
        if opt.uniqueness:
            print(
                " + found and removed {} duplicate records".format(duplicates),
                file=sys.stderr,
            )
        else:
            if duplicates > 0:
                print(
                    " + found {} duplicate records - remove with: scmssort -u".format(
                        duplicates
                    ),
                    file=sys.stderr,
                )
            else:
                print(" + found 0 duplicate records", file=sys.stderr)

        print("Output:", file=sys.stderr)
        if outStart and outEnd:
            print(
                " + time window: %s~%s"
                % (seiscomp.core.Time(outStart), seiscomp.core.Time(outEnd)),
                file=sys.stderr,
            )
        else:
            print("No data found in time window", file=sys.stderr)

    else:
        # This is an important hint which should always be printed
        if duplicates > 0 and not opt.uniqueness:
            print(
                "Found {} duplicate records - remove with: scmssort -u".format(
                    duplicates
                ),
                file=sys.stderr,
            )
|
Binary file not shown.
@ -0,0 +1,131 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import sys
|
||||
import seiscomp.core
|
||||
import seiscomp.client
|
||||
import seiscomp.datamodel
|
||||
|
||||
|
||||
class OriginList(seiscomp.client.Application):
    """List origin publicIDs within a time interval.

    Connects to the database, selects origins by time (and optionally by
    author) and prints their publicIDs to stdout separated by the
    configured delimiter.
    """

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        # database-only tool: no messaging, no daemon mode
        self.setMessagingEnabled(False)
        self.setDatabaseEnabled(True, False)
        self.setDaemonEnabled(False)

        # defaults: epoch start until now; delimiter resolved in init()
        self._startTime = seiscomp.core.Time()
        self._endTime = seiscomp.core.Time.GMT()
        self._delimiter = None

    def createCommandLineDescription(self):
        """Register the Origins and Output command-line option groups."""
        self.commandline().addGroup("Origins")
        self.commandline().addStringOption("Origins", "begin",
                                           "The lower bound of the time interval. Format: '1970-01-01 00:00:00'.")
        self.commandline().addStringOption("Origins", "end",
                                           "The upper bound of the time interval. Format: '1970-01-01 00:00:00'.")
        self.commandline().addStringOption("Origins", "author",
                                           "The author of the origins.")

        self.commandline().addGroup("Output")
        self.commandline().addStringOption("Output", "delimiter,D",
                                           "The delimiter of the resulting "
                                           "origin IDs. Default: '\\n')")
        return True

    def init(self):
        """Parse command-line values into interval, author and delimiter."""
        if not seiscomp.client.Application.init(self):
            return False

        try:
            start = self.commandline().optionString("begin")
            if not self._startTime.fromString(start, "%F %T"):
                print("Wrong 'begin' given -> assuming {}"
                      .format(self._startTime), file=sys.stderr)
        except RuntimeError:
            print("No 'begin' given -> assuming {}".format(self._startTime),
                  file=sys.stderr)

        try:
            end = self.commandline().optionString("end")
            if not self._endTime.fromString(end, "%F %T"):
                print("Wrong 'end' given -> assuming {}"
                      .format(self._endTime), file=sys.stderr)
        except RuntimeError:
            print("No 'end' given -> assuming {}".format(self._endTime),
                  file=sys.stderr)

        try:
            self.author = self.commandline().optionString("author")
            sys.stderr.write("%s author used for output\n" % (self.author))
        except RuntimeError:
            # False doubles as "no author filter" in run()
            self.author = False

        try:
            self._delimiter = self.commandline().optionString("delimiter")
        except RuntimeError:
            self._delimiter = "\n"

        return True

    def printUsage(self):
        """Print usage, generic application options and examples."""

        print('''Usage:
  scorgls [options]

List origin IDs available in a given time range and print to stdout.''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Print all origin IDs from year 2022 and thereafter
  scorgls -d mysql://sysop:sysop@localhost/seiscomp --begin "2022-01-01 00:00:00"
''')

    def run(self):
        """Query the database and print the matching origin publicIDs."""
        # bug fix: seiscomp.logging is used here but was never imported at
        # module level; import it locally so the call cannot fail
        import seiscomp.logging
        seiscomp.logging.debug("Search interval: %s - %s" %
                               (self._startTime, self._endTime))
        out = []
        # NOTE(review): the query is assembled via string interpolation;
        # time strings come from the database layer, and the author value
        # goes through query().toString() -- verify that this escapes user
        # input properly
        q = "select PublicObject.%s, Origin.* from Origin, PublicObject where Origin._oid=PublicObject._oid and Origin.%s >= '%s' and Origin.%s < '%s'" %\
            (self.database().convertColumnName("publicID"),
             self.database().convertColumnName("time_value"),
             self.database().timeToString(self._startTime),
             self.database().convertColumnName("time_value"),
             self.database().timeToString(self._endTime))

        if self.author:
            q += " and Origin.%s = '%s' " %\
                (self.database().convertColumnName("creationInfo_author"),
                 self.query().toString(self.author))

        for obj in self.query().getObjectIterator(q, seiscomp.datamodel.Origin.TypeInfo()):
            org = seiscomp.datamodel.Origin.Cast(obj)
            if org:
                out.append(org.publicID())

        print("{}\n".format(self._delimiter.join(out)), file=sys.stdout)
        return True
|
||||
|
||||
|
||||
def main():
    """Instantiate the OriginList application and run it."""
    application = OriginList(len(sys.argv), sys.argv)
    application()
|
||||
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    main()
|
Binary file not shown.
@ -0,0 +1,328 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import time, sys, os, traceback
|
||||
import seiscomp.core, seiscomp.client, seiscomp.datamodel
|
||||
import seiscomp.logging, seiscomp.system
|
||||
|
||||
|
||||
def createDirectory(dir):
    """Ensure that directory *dir* exists and is writable.

    Returns True when the directory is already writable or could be
    created (including all intermediate directories), False otherwise.
    """
    if os.access(dir, os.W_OK):
        return True

    try:
        os.makedirs(dir)
        return True
    except OSError:
        # narrow except: only file-system errors mean "cannot create";
        # anything else (e.g. TypeError) should propagate
        return False
|
||||
|
||||
|
||||
def timeToString(t):
    """Format a time object as HH:MM:SS with microsecond precision."""
    pattern = "%T.%6f"
    return t.toString(pattern)
|
||||
|
||||
|
||||
def timeSpanToString(ts):
    """Format a time span as [-]DD:HH:MM:SS.ffffff.

    ts must provide seconds() and microseconds(); a leading '-' is
    emitted when either component is negative.
    """
    neg = ts.seconds() < 0 or ts.microseconds() < 0
    secs = abs(ts.seconds())
    # bug fix: '/' is float division under Python 3; use divmod so the
    # components stay exact integers
    days, daySecs = divmod(secs, 86400)
    hours, hourSecs = divmod(daySecs, 3600)
    mins, secs = divmod(hourSecs, 60)
    usecs = abs(ts.microseconds())

    if neg:
        return "-%.2d:%.2d:%.2d:%.2d.%06d" % (days, hours, mins, secs, usecs)
    else:
        return "%.2d:%.2d:%.2d:%.2d.%06d" % (days, hours, mins, secs, usecs)
|
||||
|
||||
|
||||
class ProcLatency(seiscomp.client.Application):
    """Log processing latencies of picks, amplitudes, origins, magnitudes
    and events received via messaging.

    Each entry is printed to stdout and appended to per-day log files
    below the configured storage directory.
    """

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        # pure messaging listener: interpret notifiers, no database
        self.setMessagingEnabled(True)
        self.setDatabaseEnabled(False, False)

        self.setAutoApplyNotifierEnabled(False)
        self.setInterpretNotifierEnabled(True)

        self.addMessagingSubscription("PICK")
        self.addMessagingSubscription("AMPLITUDE")
        self.addMessagingSubscription("LOCATION")
        self.addMessagingSubscription("MAGNITUDE")
        self.addMessagingSubscription("EVENT")

        self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)

        # storage root plus caches of the directories last written to
        self._directory = ""
        self._nowDirectory = ""
        self._triggeredDirectory = ""
        self._logCreated = False

    def createCommandLineDescription(self):
        """Add the --directory/-o command-line option."""
        try:
            self.commandline().addGroup("Storage")
            self.commandline().addStringOption(
                "Storage", "directory,o", "Specify the storage directory")
        except:
            seiscomp.logging.warning(
                "caught unexpected error %s" % sys.exc_info())

    def initConfiguration(self):
        """Read 'directory' and 'logMsgLatency' from the module configuration."""
        if not seiscomp.client.Application.initConfiguration(self):
            return False

        try:
            self._directory = self.configGetString("directory")
        except:
            pass

        try:
            self._logCreated = self.configGetBool("logMsgLatency")
        except:
            pass

        return True

    def init(self):
        """Resolve the storage directory (command line overrides config)."""
        if not seiscomp.client.Application.init(self):
            return False

        try:
            self._directory = self.commandline().optionString("directory")
        except:
            pass

        try:
            # ensure a trailing slash so paths can be concatenated directly
            if self._directory[-1] != "/":
                self._directory = self._directory + "/"
        except:
            pass

        if self._directory:
            self._directory = seiscomp.system.Environment.Instance().absolutePath(self._directory)
            sys.stderr.write("Logging latencies to %s\n" % self._directory)

        return True

    def addObject(self, parentID, obj):
        """Notifier callback: a new object arrived."""
        try:
            self.logObject(parentID, obj, False)
        except:
            sys.stderr.write("%s\n" % traceback.format_exc())

    def updateObject(self, parentID, obj):
        """Notifier callback: an existing object was updated."""
        try:
            self.logObject("", obj, True)
        except:
            sys.stderr.write("%s\n" % traceback.format_exc())

    def logObject(self, parentID, obj, update):
        """Dispatch *obj* by type and write the matching latency entry."""
        now = seiscomp.core.Time.GMT()

        pick = seiscomp.datamodel.Pick.Cast(obj)
        if pick:
            phase = ""
            try:
                phase = pick.phaseHint().code()
            except:
                pass

            created = None
            if self._logCreated:
                try:
                    created = pick.creationInfo().creationTime()
                except:
                    pass

            self.logStation(now, created, pick.time().value(
            ), pick.publicID() + ";P;" + phase, pick.waveformID(), update)
            return

        amp = seiscomp.datamodel.Amplitude.Cast(obj)
        if amp:
            created = None
            if self._logCreated:
                try:
                    created = amp.creationInfo().creationTime()
                except:
                    pass

            try:
                self.logStation(now, created, amp.timeWindow().reference(), amp.publicID(
                ) + ";A;" + amp.type() + ";" + "%.2f" % amp.amplitude().value(), amp.waveformID(), update)
            except:
                pass
            return

        org = seiscomp.datamodel.Origin.Cast(obj)
        if org:
            status = ""
            # bug fix: 'depth' was unbound (NameError) when org.depth()
            # raised; default to an empty string like the other fields
            depth = ""
            lat = "%.2f" % org.latitude().value()
            lon = "%.2f" % org.longitude().value()
            try:
                depth = "%d" % org.depth().value()
            except:
                pass

            try:
                status = seiscomp.datamodel.EOriginStatusNames.name(
                    org.status())
            except:
                pass

            self.logFile(now, org.time().value(), org.publicID(
            ) + ";O;" + status + ";" + lat + ";" + lon + ";" + depth, update)
            return

        mag = seiscomp.datamodel.Magnitude.Cast(obj)
        if mag:
            count = ""
            try:
                count = "%d" % mag.stationCount()
            except:
                pass
            self.logFile(now, None, mag.publicID() + ";M;" + mag.type() +
                         ";" + "%.4f" % mag.magnitude().value() + ";" + count, update)
            return

        orgref = seiscomp.datamodel.OriginReference.Cast(obj)
        if orgref:
            self.logFile(now, None, parentID + ";OR;" +
                         orgref.originID(), update)
            return

        evt = seiscomp.datamodel.Event.Cast(obj)
        if evt:
            self.logFile(now, None, evt.publicID(
            ) + ";E;" + evt.preferredOriginID() + ";" + evt.preferredMagnitudeID(), update)
            return

    def logStation(self, received, created, triggered, text, waveformID, update):
        """Write a per-stream latency entry (<stream>.rcv and <stream>.trg)."""
        streamID = waveformID.networkCode() + "." + waveformID.stationCode() + "." + \
            waveformID.locationCode() + "." + waveformID.channelCode()

        aNow = received.get()
        aTriggered = triggered.get()

        # log directories are YYYY/MM/DD below the storage root
        nowDirectory = self._directory + \
            "/".join(["%.2d" % i for i in aNow[1:4]]) + "/"
        triggeredDirectory = self._directory + \
            "/".join(["%.2d" % i for i in aTriggered[1:4]]) + "/"

        logEntry = timeSpanToString(received - triggered) + ";"
        if created is not None:
            logEntry = logEntry + timeSpanToString(received - created) + ";"
        else:
            logEntry = logEntry + ";"

        # 'U' marks an update, 'A' an addition
        if update:
            logEntry = logEntry + "U"
        else:
            logEntry = logEntry + "A"

        logEntry = logEntry + ";" + text

        sys.stdout.write("%s;%s\n" % (timeToString(received), logEntry))

        if nowDirectory != self._nowDirectory:
            if createDirectory(nowDirectory) == False:
                seiscomp.logging.error(
                    "Unable to create directory %s" % nowDirectory)
                return False

            self._nowDirectory = nowDirectory

        self.writeLog(self._nowDirectory + streamID + ".rcv",
                      timeToString(received) + ";" + logEntry)

        if triggeredDirectory != self._triggeredDirectory:
            if createDirectory(triggeredDirectory) == False:
                seiscomp.logging.error(
                    "Unable to create directory %s" % triggeredDirectory)
                return False

            self._triggeredDirectory = triggeredDirectory

        self.writeLog(self._triggeredDirectory + streamID +
                      ".trg", timeToString(triggered) + ";" + logEntry)

        return True

    def logFile(self, received, triggered, text, update):
        """Write a latency entry to objects.rcv (and objects.trg if triggered)."""
        aNow = received.get()
        nowDirectory = self._directory + \
            "/".join(["%.2d" % i for i in aNow[1:4]]) + "/"
        triggeredDirectory = None

        logEntry = ""

        if not triggered is None:
            aTriggered = triggered.get()
            triggeredDirectory = self._directory + \
                "/".join(["%.2d" % i for i in aTriggered[1:4]]) + "/"

            logEntry = logEntry + timeSpanToString(received - triggered)

        logEntry = logEntry + ";"

        # 'U' marks an update, 'A' an addition
        if update:
            logEntry = logEntry + "U"
        else:
            logEntry = logEntry + "A"

        logEntry = logEntry + ";" + text

        sys.stdout.write("%s;%s\n" % (timeToString(received), logEntry))

        if nowDirectory != self._nowDirectory:
            if createDirectory(nowDirectory) == False:
                seiscomp.logging.error(
                    "Unable to create directory %s" % nowDirectory)
                return False

            self._nowDirectory = nowDirectory

        self.writeLog(self._nowDirectory + "objects.rcv",
                      timeToString(received) + ";" + logEntry)

        if triggeredDirectory:
            if triggeredDirectory != self._triggeredDirectory:
                if createDirectory(triggeredDirectory) == False:
                    seiscomp.logging.error(
                        "Unable to create directory %s" % triggeredDirectory)
                    return False

                self._triggeredDirectory = triggeredDirectory

            self.writeLog(self._triggeredDirectory + "objects.trg",
                          timeToString(triggered) + ";" + logEntry)

        return True

    def writeLog(self, file, text):
        """Append *text* plus a newline to log file *file*."""
        # NOTE(review): consider a 'with' block; kept as-is to preserve the
        # exact behavior when open() fails
        of = open(file, "a")
        if of:
            of.write(text)
            of.write("\n")
            of.close()
||||
|
||||
|
||||
# Instantiate the application and exit with its return code.
app = ProcLatency(len(sys.argv), sys.argv)
sys.exit(app())
|
Binary file not shown.
@ -0,0 +1,252 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) 2021 by gempa GmbH #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
# #
|
||||
# adopted from scqcquery #
|
||||
# Author: Dirk Roessler, gempa GmbH #
|
||||
# Email: roessler@gempa.de #
|
||||
############################################################################
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import sys
|
||||
import re
|
||||
import seiscomp.core
|
||||
import seiscomp.client
|
||||
import seiscomp.io
|
||||
import seiscomp.datamodel
|
||||
|
||||
# Default QC parameters queried when --parameter is not given; multi-word
# parameter names are quoted because they contain spaces.
qcParamsDefault = "latency,delay,timing,offset,rms,availability,"\
                  "'gaps count','gaps interval','gaps length',"\
                  "'overlaps count','overlaps interval','overlaps length',"\
                  "'spikes count','spikes interval','spikes amplitude'"
|
||||
|
||||
|
||||
def getStreamsFromInventory(self):
    """Collect stream IDs (NET.STA.LOC.CHA) from the database inventory.

    self: the running Application instance (provides database() and the
    _start/_end interval); note this is a module-level function despite
    the parameter name. Returns a list of unique stream IDs, or False on
    any error.
    NOTE(review): returning False instead of [] on failure forces callers
    to distinguish the two -- consider raising instead.
    """
    try:
        dbr = seiscomp.datamodel.DatabaseReader(self.database())
        inv = seiscomp.datamodel.Inventory()
        dbr.loadNetworks(inv)

        streamList = set()
        for inet in range(inv.networkCount()):
            network = inv.network(inet)
            dbr.load(network)
            for ista in range(network.stationCount()):
                station = network.station(ista)
                try:
                    start = station.start()
                except Exception:
                    # station without a start time cannot be checked
                    continue
                try:
                    end = station.end()
                    # skip stations whose epoch does not overlap the interval
                    # NOTE(review): the condition looks asymmetric
                    # (start <= self._end <= end combined with
                    # end >= self._start) -- verify the intended overlap test
                    if not start <= self._end <= end and end >= self._start:
                        continue
                except Exception:
                    # no end time: station epoch is still open
                    pass

                for iloc in range(station.sensorLocationCount()):
                    location = station.sensorLocation(iloc)
                    for istr in range(location.streamCount()):
                        stream = location.stream(istr)
                        streamID = network.code() + "." + station.code() \
                            + "." + location.code() + "." + stream.code()
                        streamList.add(streamID)

        return list(streamList)

    except Exception:
        return False
|
||||
|
||||
|
||||
class WfqQuery(seiscomp.client.Application):
    """Query a database for waveform quality control (QC) parameters and
    write them to a file or stdout as SeisComP XML.
    """

    # Format used to parse --begin/--end values and the built-in defaults.
    _timeFormat = "%Y-%m-%d %H:%M:%S"

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)

        self.setMessagingEnabled(False)
        self.setDatabaseEnabled(True, False)
        self.setLoggingToStdErr(True)
        self.setDaemonEnabled(False)

        self._streams = False          # list of stream IDs, or False
        self._fromInventory = False    # read stream list from inventory

        self._outfile = '-'            # '-' means stdout
        self._parameter = qcParamsDefault
        # BUG FIX: the previous defaults ("1900-01-01T00:00:00Z" and
        # str(Time.GMT())) did not match the "%Y-%m-%d %H:%M:%S" format
        # used for parsing in run(), so the defaults could never be parsed.
        # They are now generated in the same format that run() expects.
        self._start = "1900-01-01 00:00:00"
        self._end = seiscomp.core.Time.GMT().toString(self._timeFormat)
        self._formatted = False

    def createCommandLineDescription(self):
        """Declare the command-line options of this application."""
        self.commandline().addGroup("Output")
        self.commandline().addStringOption("Output", "output,o",
                                           "output file name for XML. Writes "
                                           "to stdout if not given.")
        self.commandline().addOption("Output", "formatted,f",
                                     "write formatted XML")

        self.commandline().addGroup("Query")
        self.commandline().addStringOption(
            "Query", "begin,b", "Begin time of query: 'YYYY-MM-DD hh:mm:ss'")
        self.commandline().addStringOption(
            "Query", "end,e", "End time of query: 'YYYY-MM-DD hh:mm:ss'")
        self.commandline().addStringOption(
            "Query", "stream-id,i",
            "Waveform stream ID to search for QC parameters: net.sta.loc.cha -"
            " [networkCode].[stationCode].[sensorLocationCode].[channelCode]. "
            "Provide a single ID or a comma-separated list. Overrides "
            "--streams-from-inventory")
        self.commandline().addStringOption(
            "Query", "parameter,p",
            "QC parameter to output: (e.g. delay, rms, 'gaps count' ...). "
            "Provide a single parameter or a comma-separated list. Defaults "
            "apply if parameter is not given.")
        self.commandline().addOption("Query", "streams-from-inventory",
                                     "Read streams from inventory. Superseded"
                                     " by stream-id.")

        return True

    def printUsage(self):
        """Print usage information, the default parameters and examples."""
        print('''Usage:
scqueryqc [options]

Query a database for waveform quality control (QC) parameters.''', file=sys.stderr)

        seiscomp.client.Application.printUsage(self)

        print('''Default QC parameters: {}
'''.format(qcParamsDefault), file=sys.stderr)
        print('''Examples:
Query rms and delay values for streams 'AU.AS18..SHZ' and 'AU.AS19..SHZ' from '2021-11-20 00:00:00' until current
scqueryqc -d localhost -b '2021-11-20 00:00:00' -p rms,delay -i AU.AS18..SHZ,AU.AS19..SHZ
''', file=sys.stderr)

    def validateParameters(self):
        """Read all command-line options into the corresponding members.

        Returns False when neither --stream-id nor --streams-from-inventory
        is given; missing optional values keep their defaults.
        """
        if not seiscomp.client.Application.validateParameters(self):
            return False

        # optionString() raises RuntimeError when the option is absent.
        try:
            self._streams = self.commandline().optionString("stream-id").split(",")
        except RuntimeError:
            pass

        try:
            self._fromInventory = self.commandline().hasOption("streams-from-inventory")
        except RuntimeError:
            pass

        if not self._streams and not self._fromInventory:
            print("Provide streamID(s): --stream-id or --streams-from-inventory",
                  file=sys.stderr)
            return False

        try:
            self._outfile = self.commandline().optionString("output")
        except RuntimeError:
            print("No output file name given: Sending to stdout",
                  file=sys.stderr)

        try:
            self._start = self.commandline().optionString("begin")
        except RuntimeError:
            print("No begin time given, considering: {}".format(self._start),
                  file=sys.stderr)

        try:
            self._end = self.commandline().optionString("end")
        except RuntimeError:
            print("No end time given, considering 'now': {}".format(self._end),
                  file=sys.stderr)

        try:
            self._parameter = self.commandline().optionString("parameter")
        except RuntimeError:
            print("No QC parameter given, using default", file=sys.stderr)

        try:
            self._formatted = self.commandline().hasOption("formatted")
        except RuntimeError:
            pass

        return True

    def run(self):
        """Collect the requested QC parameters and write them as XML."""
        if not self.query():
            print("No database connection!\n", file=sys.stderr)
            return False

        streams = self._streams
        if not streams and self._fromInventory:
            try:
                streams = getStreamsFromInventory(self)
            except RuntimeError:
                print("No streams read from database!\n", file=sys.stderr)
                return False

        if not streams:
            print("Empty stream list")
            return False

        for stream in streams:
            if re.search("[*?]", stream):
                print("Wildcards in streamID are not supported: {}\n"
                      .format(stream), file=sys.stderr)
                return False

        print("Request:", file=sys.stderr)
        print("  streams:           {}".format(str(streams)), file=sys.stderr)
        print("  number of streams: {}".format(len(streams)), file=sys.stderr)
        print("  begin time:        {}".format(str(self._start)), file=sys.stderr)
        print("  end time:          {}".format(str(self._end)), file=sys.stderr)
        print("  parameters:        {}".format(str(self._parameter)),
              file=sys.stderr)
        print("Output:", file=sys.stderr)
        print("  file:              {}".format(self._outfile), file=sys.stderr)
        print("  formatted XML:     {}".format(self._formatted), file=sys.stderr)

        # create archive
        xarc = seiscomp.io.XMLArchive()
        if not xarc.create(self._outfile, True, True):
            print("Unable to write XML to {}!\n".format(self._outfile),
                  file=sys.stderr)
            return False
        xarc.setFormattedOutput(self._formatted)
        qc = seiscomp.datamodel.QualityControl()

        # Parse the query interval once; it is invariant over the loops below.
        begin = seiscomp.core.Time.FromString(self._start, self._timeFormat)
        end = seiscomp.core.Time.FromString(self._end, self._timeFormat)

        # write parameters
        for parameter in self._parameter.split(","):
            for stream in streams:
                (net, sta, loc, cha) = stream.split(".")
                it = self.query().getWaveformQuality(
                    seiscomp.datamodel.WaveformStreamID(net, sta, loc, cha, ""),
                    parameter, begin, end)

                while it.get():
                    try:
                        wfq = seiscomp.datamodel.WaveformQuality.Cast(it.get())
                        qc.add(wfq)
                    except Exception:
                        # Best effort: skip objects that cannot be added.
                        pass
                    it.step()

        xarc.writeObject(qc)
        xarc.close()
        return True
|
||||
|
||||
|
||||
# Guard the entry point so importing this module has no side effects,
# consistent with the scsendjournal script in this package.
if __name__ == "__main__":
    app = WfqQuery(len(sys.argv), sys.argv)
    sys.exit(app())
|
Binary file not shown.
Binary file not shown.
@ -0,0 +1,83 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import sys
|
||||
import seiscomp.core
|
||||
import seiscomp.client
|
||||
import seiscomp.datamodel
|
||||
|
||||
|
||||
class SendJournal(seiscomp.client.Application):
    """Send a journal entry to the messaging system to manipulate events."""

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)
        # Pure messaging client: no database access is needed.
        self.setDatabaseEnabled(False, False)
        self.setMessagingEnabled(True)
        self.setMessagingUsername("")
        self.setPrimaryMessagingGroup("EVENT")

    def init(self):
        """Collect the positional arguments: {objectID} {action} [parameters].

        Returns False (with a usage hint on stderr) when fewer than two
        positional arguments are given.
        """
        if not seiscomp.client.Application.init(self):
            return False
        self.params = self.commandline().unrecognizedOptions()
        if len(self.params) >= 2:
            return True
        sys.stderr.write(
            self.name() + " [opts] {objectID} {action} [parameters]\n")
        return False

    def printUsage(self):
        """Print usage information and an example invocation."""
        print('''Usage:
scsendjournal [options]

Send journaling information to the messaging to manipulate event parameters''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Set the type of the event with ID gempa2021abcd to 'earthquake'
scsendjournal -H localhost gempa2021abcd EvType "earthquake"
''')

    def run(self):
        """Build the journal entry, wrap it in a notifier and send it."""
        entry = seiscomp.datamodel.JournalEntry()
        entry.setCreated(seiscomp.core.Time.GMT())
        entry.setObjectID(self.params[0])
        entry.setSender(self.author())
        entry.setAction(self.params[1])

        sys.stderr.write(
            "Sending entry (" + entry.objectID() + "," + entry.action() + ")\n")

        if len(self.params) > 2:
            entry.setParameters(self.params[2])

        notifier = seiscomp.datamodel.Notifier(
            seiscomp.datamodel.Journaling.ClassName(),
            seiscomp.datamodel.OP_ADD, entry)

        message = seiscomp.datamodel.NotifierMessage()
        message.attach(notifier)
        self.connection().send(message)

        return True
|
||||
|
||||
|
||||
def main(argc, argv):
    """Instantiate the SendJournal application and execute it."""
    return SendJournal(argc, argv)()


if __name__ == "__main__":
    sys.exit(main(len(sys.argv), sys.argv))
|
@ -0,0 +1,94 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import sys
|
||||
import seiscomp.core
|
||||
import seiscomp.datamodel
|
||||
import seiscomp.client
|
||||
import seiscomp.logging
|
||||
|
||||
|
||||
class SendOrigin(seiscomp.client.Application):
    """Create an artificial origin from --coord/--time and send it to the
    GUI messaging group."""

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)
        self.setDatabaseEnabled(False, False)
        self.setMessagingEnabled(True)
        self.setPrimaryMessagingGroup("GUI")

    def init(self):
        """Parse --coord and --time and build the origin to be sent."""
        if not seiscomp.client.Application.init(self):
            return False

        try:
            cstr = self.commandline().optionString("coord")
            tstr = self.commandline().optionString("time")
        except RuntimeError:
            # BUG FIX: was a bare 'except:' which also swallowed SystemExit
            # and KeyboardInterrupt. optionString() raises RuntimeError when
            # the option is missing.
            sys.stderr.write(
                "Must specify origin using '--coord lat,lon,dep --time time'\n")
            return False

        self.origin = seiscomp.datamodel.Origin.Create()

        ci = seiscomp.datamodel.CreationInfo()
        ci.setAgencyID(self.agencyID())
        ci.setCreationTime(seiscomp.core.Time.GMT())
        self.origin.setCreationInfo(ci)

        lat, lon, dep = list(map(float, cstr.split(",")))
        self.origin.setLongitude(seiscomp.datamodel.RealQuantity(lon))
        self.origin.setLatitude(seiscomp.datamodel.RealQuantity(lat))
        self.origin.setDepth(seiscomp.datamodel.RealQuantity(dep))

        # NOTE(review): the ':0:0' suffix looks intended to pad inputs with
        # missing minutes/seconds before parsing with "%F %T" — confirm.
        time = seiscomp.core.Time()
        time.fromString(tstr.replace("/", "-") + ":0:0", "%F %T")
        self.origin.setTime(seiscomp.datamodel.TimeQuantity(time))

        return True

    def createCommandLineDescription(self):
        """Declare the --coord and --time options."""
        try:
            self.commandline().addGroup("Parameters")
            self.commandline().addStringOption("Parameters",
                                               "coord",
                                               "Latitude,longitude,depth of origin")
            self.commandline().addStringOption("Parameters",
                                               "time", "time of origin")
        except Exception:
            # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
            # still propagate; anything else is only logged.
            seiscomp.logging.warning("caught unexpected error %s" % sys.exc_info())

    def printUsage(self):
        """Print usage information and an example invocation."""
        print('''Usage:
scsendorigin [options]

Create an artificial origin and send to the messaging''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Send an artificial origin with hypocenter parameters to the messaging
scsendorigin --time "2022-05-01 10:00:00" --coord 52,12,10
''')

    def run(self):
        """Send the prepared artificial origin and quit."""
        msg = seiscomp.datamodel.ArtificialOriginMessage(self.origin)
        self.connection().send(msg)
        return True
|
||||
|
||||
|
||||
# Guard the entry point so importing this module has no side effects,
# consistent with the scsendjournal script in this package.
if __name__ == "__main__":
    app = SendOrigin(len(sys.argv), sys.argv)
    # app.setName("scsendorigin")
    app.setMessagingUsername("scsendorg")
    sys.exit(app())
|
Binary file not shown.
@ -0,0 +1,395 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import sys, os, re
|
||||
import seiscomp.core, seiscomp.client, seiscomp.logging, seiscomp.system
|
||||
|
||||
|
||||
"""
|
||||
Monitor application that connects to the messaging and collects all
|
||||
information on the STATUS_GROUP to create an XML file ever N seconds.
|
||||
It can furthermore call a configured script to trigger processing of the
|
||||
produced XML file.
|
||||
"""
|
||||
|
||||
inputRegEx = re.compile("in\((?P<params>[^\)]*)\)")
|
||||
outputRegEx = re.compile("out\((?P<params>[^\)]*)\)")
|
||||
|
||||
|
||||
# Define all units of measure for available system SOH tags. Tags that are
|
||||
# not given here are not processed.
|
||||
Tests = {
|
||||
"cpuusage": "%",
|
||||
"clientmemoryusage": "kB",
|
||||
"sentmessages": "cnt",
|
||||
"receivedmessages": "cnt",
|
||||
"messagequeuesize": "cnt",
|
||||
"objectcount": "cnt",
|
||||
"uptime": "s",
|
||||
"dbadds": "row/s",
|
||||
"dbupdates": "row/s",
|
||||
"dbdeletes": "row/s"
|
||||
}
|
||||
|
||||
|
||||
#----------------------------------------------------------------------------
|
||||
# Class TestLog to hold the properties of a test. It also creates XML.
|
||||
#----------------------------------------------------------------------------
|
||||
class TestLog:
    """Holds one test (SOH parameter) value and serializes it to XML."""

    def __init__(self):
        self.value = None   # parameter value as received (usually a string)
        self.uom = None     # unit of measure, taken from the Tests table
        self.update = None  # time of the last update
# NOTE(review): attribute values are written unescaped; callers must make
# sure they contain no XML metacharacters.

    def toXML(self, f, name):
        """Write this test as a <test .../> element to stream f."""
        f.write('<test name="%s"' % name)
        if self.value:
            try:
                # Try to convert to float; integral values are written
                # without a fractional part.
                fvalue = float(self.value)
                if fvalue % 1.0 >= 1E-6:
                    f.write(' value="%f"' % fvalue)
                else:
                    f.write(' value="%d"' % int(fvalue))
            except (TypeError, ValueError):
                # BUG FIX: narrowed from a bare 'except:'. Non-numeric
                # values are written verbatim.
                f.write(' value="%s"' % self.value)
        if self.uom:
            f.write(' uom="%s"' % self.uom)
        if self.update:
            f.write(' updateTime="%s"' % self.update)
        f.write('/>')
|
||||
|
||||
|
||||
#----------------------------------------------------------------------------
|
||||
# Class ObjectLog to hold the properties of a object log. It also creates
|
||||
# XML.
|
||||
#----------------------------------------------------------------------------
|
||||
class ObjectLog:
    """Holds the counters of one object log and serializes them to XML."""

    def __init__(self):
        self.count = None       # number of objects
        self.average = None     # average rate
        self.timeWindow = None  # time window of the counters
        self.last = None        # time of the last object
        self.update = None      # time of the last update

    def toXML(self, f, name, channel):
        """Write this log as an <object .../> element to stream f.

        Empty name/channel and unset counters are omitted; updateTime is
        always written.
        """
        attrs = ['<object']
        if name:
            attrs.append(' name="%s"' % name)
        if channel:
            attrs.append(' channel="%s"' % channel)
        if self.count is not None:
            attrs.append(' count="%s"' % self.count)
        if self.timeWindow is not None:
            attrs.append(' timeWindow="%s"' % self.timeWindow)
        if self.average is not None:
            attrs.append(' average="%s"' % self.average)
        if self.last:
            attrs.append(' lastTime="%s"' % self.last)
        attrs.append(' updateTime="%s"' % self.update)
        attrs.append('/>')
        f.write(''.join(attrs))
|
||||
|
||||
|
||||
#----------------------------------------------------------------------------
|
||||
# Class Client that holds all tests and object logs of a particular client
|
||||
# (messaging user name).
|
||||
#----------------------------------------------------------------------------
|
||||
class Client:
    """Collects all tests and object logs of one messaging client
    (identified by its messaging user name)."""

    def __init__(self):
        self.pid = None       # process ID as reported by the client
        self.progname = None  # program name as reported by the client
        self.host = None      # host name as reported by the client

        self.inputLogs = dict()   # (name, channel) -> ObjectLog
        self.outputLogs = dict()  # (name, channel) -> ObjectLog
        self.tests = dict()       # test name -> TestLog

    def updateTests(self, updateTime, tests):
        """Update/add (system) tests from a status-message dictionary.

        Only tags listed in the module-level Tests table are stored; the
        pid/programname/hostname tags additionally update the client info.
        """
        for name, value in list(tests.items()):
            if name == "pid":
                self.pid = value
            elif name == "programname":
                self.progname = value
            elif name == "hostname":
                self.host = value

            if name not in Tests:
                continue

            # Convert d:h:m:s to seconds
            if name == "uptime":
                try:
                    t = [int(v) for v in value.split(":")]
                except ValueError:
                    # BUG FIX: narrowed from a bare 'except:'; int() raises
                    # ValueError for malformed fields.
                    continue
                if len(t) != 4:
                    continue
                value = str(t[0]*86400 + t[1]*3600 + t[2]*60 + t[3])

            if name not in self.tests:
                log = TestLog()
                log.uom = Tests[name]
                self.tests[name] = log
            else:
                log = self.tests[name]
            log.value = value
            log.update = updateTime

    def updateObjects(self, updateTime, log):
        """Update/add one object log parsed from a log-text item.

        Items look like 'in(name:...,chan:...,cnt:...)' or 'out(...)';
        anything else is ignored.
        """
        # Check input structure
        v = inputRegEx.search(log)
        if not v:
            # Check out structure
            v = outputRegEx.search(log)
            if not v:
                return
            logs = self.outputLogs
        else:
            logs = self.inputLogs

        try:
            tmp = v.group('params').split(',')
        except Exception:
            # Narrowed from a bare 'except:' so SystemExit and
            # KeyboardInterrupt keep propagating.
            return

        params = dict()
        for p in tmp:
            try:
                param, value = p.split(':', 1)
            except ValueError:
                # Token without ':' separator: skip it.
                continue
            params[param] = value

        name = params.get("name", "")
        channel = params.get("chan", "")
        if (name, channel) not in logs:
            logObj = ObjectLog()
            logs[(name, channel)] = logObj
        else:
            logObj = logs[(name, channel)]

        logObj.update = updateTime
        logObj.count = params.get("cnt")
        logObj.average = params.get("avg")
        logObj.timeWindow = params.get("tw")
        logObj.last = params.get("last")

    def toXML(self, f, name):
        """Write this client as a <service> element to stream f."""
        f.write('<service name="%s"' % name)
        if self.host:
            f.write(' host="%s"' % self.host)
        if self.pid:
            f.write(' pid="%s"' % self.pid)
        if self.progname:
            f.write(' prog="%s"' % self.progname)
        f.write('>')
        # Renamed loop variables: the originals shadowed the 'name'
        # parameter and the 'id' builtin.
        for testName, log in list(self.tests.items()):
            log.toXML(f, testName)
        if len(self.inputLogs) > 0:
            f.write('<input>')
            for key, log in list(self.inputLogs.items()):
                log.toXML(f, key[0], key[1])
            f.write('</input>')
        if len(self.outputLogs) > 0:
            f.write('<output>')
            for key, log in list(self.outputLogs.items()):
                log.toXML(f, key[0], key[1])
            f.write('</output>')
        f.write("</service>")
|
||||
|
||||
|
||||
#----------------------------------------------------------------------------
|
||||
# SC3 application class Monitor
|
||||
#----------------------------------------------------------------------------
|
||||
class Monitor(seiscomp.client.Application):
    """Collects status messages from the STATUS_GROUP, keeps one Client
    object per connected peer and dumps the state as XML every N seconds,
    optionally running a post-processing script."""

    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)
        self.setDatabaseEnabled(False, False)
        # Receive connect/disconnect notifications of other clients.
        self.setMembershipMessagesEnabled(True)
        self.addMessagingSubscription(seiscomp.client.Protocol.STATUS_GROUP)
        self.setMessagingUsername("")
        self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)
        self._clients = dict()        # client name -> Client
        self._outputScript = None     # optional script run after each dump
        self._outputFile = "@LOGDIR@/server.xml"
        self._outputInterval = 60     # seconds between XML dumps

    def createCommandLineDescription(self):
        """Declare the Output command-line options."""
        try:
            self.commandline().addGroup("Output")
            self.commandline().addStringOption("Output", "file,o",
                                               "Specify the output file to create")
            self.commandline().addIntOption("Output", "interval,i",
                                            "Specify the output interval in seconds (default: 60)")
            self.commandline().addStringOption("Output", "script",
                                               "Specify an output script to be called after the output file is generated")
        except Exception:
            # Narrowed from a bare 'except:'; unexpected errors are logged.
            seiscomp.logging.warning(
                "caught unexpected error %s" % sys.exc_info())
        return True

    def initConfiguration(self):
        """Read monitor.output.* configuration values when present."""
        if not seiscomp.client.Application.initConfiguration(self):
            return False

        # configGet*() raises RuntimeError when the key is not configured.
        try:
            self._outputFile = self.configGetString("monitor.output.file")
        except RuntimeError:
            pass

        try:
            self._outputInterval = self.configGetInt("monitor.output.interval")
        except RuntimeError:
            pass

        try:
            self._outputScript = self.configGetString("monitor.output.script")
        except RuntimeError:
            pass

        return True

    def init(self):
        """Apply command-line overrides and start the output timer."""
        if not seiscomp.client.Application.init(self):
            return False

        try:
            self._outputFile = self.commandline().optionString("file")
        except RuntimeError:
            pass

        try:
            self._outputInterval = self.commandline().optionInt("interval")
        except RuntimeError:
            pass

        try:
            self._outputScript = self.commandline().optionString("script")
        except RuntimeError:
            pass

        self._outputFile = seiscomp.system.Environment.Instance().absolutePath(
            self._outputFile)
        seiscomp.logging.info("Output file: %s" % self._outputFile)

        if self._outputScript:
            self._outputScript = seiscomp.system.Environment.Instance(
            ).absolutePath(self._outputScript)
            seiscomp.logging.info("Output script: %s" % self._outputScript)

        self._monitor = self.addInputObjectLog(
            "status", seiscomp.client.Protocol.STATUS_GROUP)
        self.enableTimer(self._outputInterval)
        seiscomp.logging.info(
            "Starting output timer with %d secs" % self._outputInterval)

        return True

    def printUsage(self):
        """Print usage information and an example invocation."""
        print('''Usage:
scsohlog [options]

Connect to the messaging collecting information sent from connected clients''')

        seiscomp.client.Application.printUsage(self)

        print('''Examples:
Create an output XML file every 60 seconds and execute a custom script to process the XML file
scsohlog -o stat.xml -i 60 --script process-stat.sh
''')

    def handleNetworkMessage(self, msg):
        """Dispatch status and disconnect packets from the messaging."""
        # A state of health message
        if msg.type == seiscomp.client.Packet.Status:
            data = filter(None, msg.payload.split("&"))
            self.updateStatus(msg.subject, data)

        # If a client disconnected, remove it from the list
        elif msg.type == seiscomp.client.Packet.Disconnected:
            if msg.subject in self._clients:
                del self._clients[msg.subject]

    def handleDisconnect(self):
        # If we got disconnected all client states are deleted
        self._clients = dict()

    def handleTimeout(self):
        """Timeout handler called by the Application class.

        Write XML to the configured output file (or stdout for '-') and
        trigger the configured script.
        """
        if self._outputFile == "-":
            self.toXML(sys.stdout)
            sys.stdout.write("\n")
            return

        try:
            f = open(self._outputFile, "w")
        except OSError:
            # BUG FIX: narrowed from a bare 'except:'; open() failures
            # raise OSError.
            seiscomp.logging.error(
                "Unable to create output file: %s" % self._outputFile)
            return

        # BUG FIX: the file is now closed even if serialization fails.
        with f:
            self.toXML(f)

        if self._outputScript:
            # NOTE(review): consider subprocess.run([...]) to avoid shell
            # interpretation of the file path.
            os.system(self._outputScript + " " + self._outputFile)

    def toXML(self, f):
        """Write the state of all known clients as XML to stream f."""
        f.write('<?xml version="1.0" encoding="UTF-8"?>')
        f.write('<server name="seiscomp" host="%s">' % self.messagingURL())
        for name, client in list(self._clients.items()):
            client.toXML(f, name)
        f.write('</server>')

    def updateStatus(self, name, items):
        """Merge one status message (key=value pairs plus object logs)."""
        if name not in self._clients:
            self._clients[name] = Client()

        now = seiscomp.core.Time.GMT()
        client = self._clients[name]
        self.logObject(self._monitor, now)

        params = dict()
        objs = []

        for t in items:
            try:
                param, value = t.split("=", 1)
                params[param] = value
            except ValueError:
                # No '=' separator: the item is an object log.
                objs.append(t)

        # Prefer the timestamp supplied by the client, fall back to 'now'.
        if "time" in params:
            update = params["time"]
            del params["time"]
        else:
            update = now.iso()

        client.updateTests(update, params)
        for o in objs:
            client.updateObjects(update, o)
|
||||
|
||||
|
||||
# Guard the entry point so importing this module has no side effects,
# consistent with the scsendjournal script in this package.
if __name__ == "__main__":
    app = Monitor(len(sys.argv), sys.argv)
    sys.exit(app())
|
||||
|
@ -0,0 +1,502 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
############################################################################
|
||||
|
||||
import sys
|
||||
import subprocess
|
||||
import traceback
|
||||
|
||||
from seiscomp import (client, core, datamodel, logging, seismology, system,
|
||||
math)
|
||||
|
||||
class VoiceAlert(client.Application):
|
||||
|
||||
def __init__(self, argc, argv):
|
||||
client.Application.__init__(self, argc, argv)
|
||||
|
||||
self.setMessagingEnabled(True)
|
||||
self.setDatabaseEnabled(True, True)
|
||||
self.setLoadRegionsEnabled(True)
|
||||
self.setMessagingUsername("")
|
||||
self.setPrimaryMessagingGroup(client.Protocol.LISTENER_GROUP)
|
||||
self.addMessagingSubscription("EVENT")
|
||||
self.addMessagingSubscription("LOCATION")
|
||||
self.addMessagingSubscription("MAGNITUDE")
|
||||
|
||||
self.setAutoApplyNotifierEnabled(True)
|
||||
self.setInterpretNotifierEnabled(True)
|
||||
|
||||
self.setLoadCitiesEnabled(True)
|
||||
self.setLoadRegionsEnabled(True)
|
||||
|
||||
self._ampType = "snr"
|
||||
self._citiesMaxDist = 20
|
||||
self._citiesMinPopulation = 50000
|
||||
|
||||
self._cache = None
|
||||
self._eventDescriptionPattern = None
|
||||
self._ampScript = None
|
||||
self._alertScript = None
|
||||
self._eventScript = None
|
||||
|
||||
self._ampProc = None
|
||||
self._alertProc = None
|
||||
self._eventProc = None
|
||||
|
||||
self._newWhenFirstSeen = False
|
||||
self._prevMessage = {}
|
||||
self._agencyIDs = []
|
||||
|
||||
def createCommandLineDescription(self):
|
||||
self.commandline().addOption(
|
||||
"Generic", "first-new", "calls an event a new event when it is "
|
||||
"seen the first time")
|
||||
self.commandline().addGroup("Alert")
|
||||
self.commandline().addStringOption(
|
||||
"Alert", "amp-type", "specify the amplitude type to listen to",
|
||||
self._ampType)
|
||||
self.commandline().addStringOption(
|
||||
"Alert", "amp-script", "specify the script to be called when a "
|
||||
"stationamplitude arrived, network-, stationcode and amplitude are "
|
||||
"passed as parameters $1, $2 and $3")
|
||||
self.commandline().addStringOption(
|
||||
"Alert", "alert-script", "specify the script to be called when a "
|
||||
"preliminary origin arrived, latitude and longitude are passed as "
|
||||
"parameters $1 and $2")
|
||||
self.commandline().addStringOption(
|
||||
"Alert", "event-script", "specify the script to be called when an "
|
||||
"event has been declared; the message string, a flag (1=new event, "
|
||||
"0=update event), the EventID, the arrival count and the magnitude "
|
||||
"(optional when set) are passed as parameter $1, $2, $3, $4 and $5")
|
||||
self.commandline().addGroup("Cities")
|
||||
self.commandline().addStringOption(
|
||||
"Cities", "max-dist", "maximum distance for using the distance "
|
||||
"from a city to the earthquake")
|
||||
self.commandline().addStringOption(
|
||||
"Cities", "min-population", "minimum population for a city to "
|
||||
"become a point of interest")
|
||||
self.commandline().addGroup("Debug")
|
||||
self.commandline().addStringOption(
|
||||
"Debug", "eventid,E", "specify Event ID")
|
||||
return True
|
||||
|
||||
def init(self):
|
||||
if not client.Application.init(self):
|
||||
return False
|
||||
|
||||
try:
|
||||
self._newWhenFirstSeen = self.configGetBool("firstNew")
|
||||
except BaseException:
|
||||
pass
|
||||
|
||||
try:
|
||||
agencyIDs = self.configGetStrings("agencyIDs")
|
||||
for item in agencyIDs:
|
||||
item = item.strip()
|
||||
if item not in self._agencyIDs:
|
||||
self._agencyIDs.append(item)
|
||||
except BaseException:
|
||||
pass
|
||||
|
||||
try:
|
||||
if self.commandline().hasOption("first-new"):
|
||||
self._newWhenFirstSeen = True
|
||||
except BaseException:
|
||||
pass
|
||||
|
||||
try:
|
||||
self._eventDescriptionPattern = self.configGetString("poi.message")
|
||||
except BaseException:
|
||||
pass
|
||||
|
||||
try:
|
||||
self._citiesMaxDist = self.configGetDouble("poi.maxDist")
|
||||
except BaseException:
|
||||
pass
|
||||
|
||||
try:
|
||||
self._citiesMaxDist = self.commandline().optionDouble("max-dist")
|
||||
except BaseException:
|
||||
pass
|
||||
|
||||
try:
|
||||
self._citiesMinPopulation = self.configGetInt("poi.minPopulation")
|
||||
except BaseException:
|
||||
pass
|
||||
|
||||
try:
|
||||
self._citiesMinPopulation = self.commandline().optionInt("min-population")
|
||||
except BaseException:
|
||||
pass
|
||||
|
||||
try:
|
||||
self._ampType = self.commandline().optionString("amp-type")
|
||||
except BaseException:
|
||||
pass
|
||||
|
||||
try:
|
||||
self._ampScript = self.commandline().optionString("amp-script")
|
||||
except BaseException:
|
||||
try:
|
||||
self._ampScript = self.configGetString("scripts.amplitude")
|
||||
except BaseException:
|
||||
logging.warning("No amplitude script defined")
|
||||
|
||||
if self._ampScript:
|
||||
self._ampScript = system.Environment.Instance().absolutePath(self._ampScript)
|
||||
|
||||
try:
|
||||
self._alertScript = self.commandline().optionString("alert-script")
|
||||
except BaseException:
|
||||
try:
|
||||
self._alertScript = self.configGetString("scripts.alert")
|
||||
except BaseException:
|
||||
logging.warning("No alert script defined")
|
||||
|
||||
if self._alertScript:
|
||||
self._alertScript = system.Environment.Instance(
|
||||
).absolutePath(self._alertScript)
|
||||
|
||||
try:
|
||||
self._eventScript = self.commandline().optionString("event-script")
|
||||
except BaseException:
|
||||
try:
|
||||
self._eventScript = self.configGetString("scripts.event")
|
||||
logging.info(
|
||||
"Using event script: %s" % self._eventScript)
|
||||
except BaseException:
|
||||
logging.warning("No event script defined")
|
||||
|
||||
if self._eventScript:
|
||||
self._eventScript = system.Environment.Instance() \
|
||||
.absolutePath(self._eventScript)
|
||||
|
||||
logging.info("Creating ringbuffer for 100 objects")
|
||||
if not self.query():
|
||||
logging.warning(
|
||||
"No valid database interface to read from")
|
||||
self._cache = datamodel.PublicObjectRingBuffer(
|
||||
self.query(), 100)
|
||||
|
||||
if self._ampScript and self.connection():
|
||||
self.connection().subscribe("AMPLITUDE")
|
||||
|
||||
if self._newWhenFirstSeen:
|
||||
logging.info(
|
||||
"A new event is declared when I see it the first time")
|
||||
|
||||
if not self._agencyIDs:
|
||||
logging.info("agencyIDs: []")
|
||||
else:
|
||||
logging.info(
|
||||
"agencyIDs: %s" % (" ".join(self._agencyIDs)))
|
||||
|
||||
return True
|
||||
|
||||
def printUsage(self):
|
||||
|
||||
print('''Usage:
|
||||
scvoice [options]
|
||||
|
||||
Alert the user acoustically in real time.
|
||||
''')
|
||||
|
||||
client.Application.printUsage(self)
|
||||
|
||||
print('''Examples:
|
||||
Execute scvoice on command line with debug output
|
||||
scvoice --debug
|
||||
''')
|
||||
|
||||
def run(self):
    """Optionally replay the event given via --eventid, then enter the main loop.

    Returns False if the application loop raises; exception details are
    written to stderr.
    """
    try:
        try:
            # Best effort: --eventid may be absent or the event unknown.
            evID = self.commandline().optionString("eventid")
            cachedEvent = self._cache.get(datamodel.Event, evID)
            if cachedEvent:
                self.notifyEvent(cachedEvent)
        except BaseException:
            pass

        return client.Application.run(self)
    except BaseException:
        trace = traceback.format_exception(*sys.exc_info())
        sys.stderr.write("".join(trace))
        return False
||||
def runAmpScript(self, net, sta, amp):
    """Launch the configured amplitude script with (net, sta, amplitude).

    Skips the call while a previous script instance is still running.
    """
    if not self._ampScript:
        return

    running = self._ampProc is not None and self._ampProc.poll() is None
    if running:
        logging.warning(
            "AmplitudeScript still in progress -> skipping message")
        return

    try:
        args = [self._ampScript, net, sta, "%.2f" % amp]
        self._ampProc = subprocess.Popen(args)
        logging.info(
            "Started amplitude script with pid %d" % self._ampProc.pid)
    except BaseException:
        logging.error(
            "Failed to start amplitude script '%s'" % self._ampScript)
|
||||
def runAlert(self, lat, lon):
    """Launch the configured alert script with the epicenter coordinates.

    Skips the call while a previous script instance is still running.
    """
    if not self._alertScript:
        return

    running = self._alertProc is not None and self._alertProc.poll() is None
    if running:
        logging.warning(
            "AlertScript still in progress -> skipping message")
        return

    try:
        args = [self._alertScript, "%.1f" % lat, "%.1f" % lon]
        self._alertProc = subprocess.Popen(args)
        logging.info(
            "Started alert script with pid %d" % self._alertProc.pid)
    except BaseException:
        logging.error(
            "Failed to start alert script '%s'" % self._alertScript)
|
||||
def handleMessage(self, msg):
    """Scan data messages for preliminary origins and trigger the alert,
    then delegate to the base class for regular message dispatch.
    """
    try:
        dataMsg = core.DataMessage.Cast(msg)
        if dataMsg:
            for attachment in dataMsg:
                origin = datamodel.Origin.Cast(attachment)
                if origin is None:
                    continue

                try:
                    # evaluationStatus may be unset and then raises
                    if origin.evaluationStatus() == datamodel.PRELIMINARY:
                        self.runAlert(origin.latitude().value(),
                                      origin.longitude().value())
                except BaseException:
                    pass

        client.Application.handleMessage(self, msg)
    except BaseException:
        trace = traceback.format_exception(*sys.exc_info())
        sys.stderr.write("".join(trace))
|
||||
def addObject(self, parentID, arg0):
    """Handle a newly added object (amplitude, origin, magnitude or event).

    Amplitudes of the configured type trigger the amplitude script;
    preliminary origins trigger the alert script; events are filtered by
    agency ID and forwarded to notifyEvent().
    """
    #pylint: disable=W0622
    try:
        obj = datamodel.Amplitude.Cast(arg0)
        if obj:
            if obj.type() == self._ampType:
                logging.debug("got new %s amplitude '%s'" % (
                    self._ampType, obj.publicID()))
                self.notifyAmplitude(obj)

        obj = datamodel.Origin.Cast(arg0)
        if obj:
            self._cache.feed(obj)
            logging.debug("got new origin '%s'" % obj.publicID())

            try:
                # evaluationStatus may be unset and then raises
                if obj.evaluationStatus() == datamodel.PRELIMINARY:
                    self.runAlert(obj.latitude().value(),
                                  obj.longitude().value())
            except BaseException:
                pass

            return

        obj = datamodel.Magnitude.Cast(arg0)
        if obj:
            self._cache.feed(obj)
            logging.debug(
                "got new magnitude '%s'" % obj.publicID())
            return

        obj = datamodel.Event.Cast(arg0)
        if obj:
            logging.debug("got new event '%s'" % obj.publicID())

            # BUGFIX: the preferred origin may not be resolvable from the
            # cache/database; previously this raised an AttributeError and
            # the event notification was silently dropped.
            agencyID = None
            org = self._cache.get(
                datamodel.Origin, obj.preferredOriginID())
            if org:
                try:
                    # creationInfo may be unset and then raises
                    agencyID = org.creationInfo().agencyID()
                except BaseException:
                    pass
            else:
                logging.warning(
                    "unable to get origin %s of event '%s'" % (
                        obj.preferredOriginID(), obj.publicID()))

            if not self._agencyIDs or agencyID in self._agencyIDs:
                self.notifyEvent(obj, True)
    except BaseException:
        info = traceback.format_exception(*sys.exc_info())
        for i in info:
            sys.stderr.write(i)
|
||||
def updateObject(self, parentID, arg0):
    """Handle an updated event: filter by agency ID and forward it to
    notifyEvent() as a non-new event.
    """
    try:
        obj = datamodel.Event.Cast(arg0)
        if obj:
            logging.debug("update event '%s'" % obj.publicID())

            # BUGFIX: the preferred origin may not be resolvable from the
            # cache/database; previously this raised an AttributeError and
            # the event update was silently dropped.
            agencyID = None
            org = self._cache.get(datamodel.Origin, obj.preferredOriginID())
            if org:
                try:
                    # creationInfo may be unset and then raises
                    agencyID = org.creationInfo().agencyID()
                except BaseException:
                    pass
            else:
                logging.warning(
                    "unable to get origin %s of event '%s'" % (
                        obj.preferredOriginID(), obj.publicID()))

            if not self._agencyIDs or agencyID in self._agencyIDs:
                self.notifyEvent(obj, False)
    except BaseException:
        info = traceback.format_exception(*sys.exc_info())
        for i in info:
            sys.stderr.write(i)
|
||||
def notifyAmplitude(self, amp):
    """Forward a station amplitude to the amplitude script."""
    wfs = amp.waveformID()
    self.runAmpScript(wfs.networkCode(),
                      wfs.stationCode(),
                      amp.amplitude().value())
|
||||
def notifyEvent(self, evt, newEvent=True):
    """Build a spoken-alert message for an event and run the event script.

    Resolves the preferred origin and magnitude from the cache, composes a
    human-readable description, suppresses repeated messages and finally
    starts the configured event script (unless one is still running).
    """
    try:
        org = self._cache.get(datamodel.Origin, evt.preferredOriginID())
        if not org:
            logging.warning("unable to get origin %s, ignoring event "
                            "message" % evt.preferredOriginID())
            return

        preliminary = False
        try:
            # evaluationStatus may be unset and then raises
            if org.evaluationStatus() == datamodel.PRELIMINARY:
                preliminary = True
        except BaseException:
            pass

        # A magnitude is only required for non-preliminary origins.
        if not preliminary:
            nmag = self._cache.get(
                datamodel.Magnitude, evt.preferredMagnitudeID())
            if nmag:
                mag = nmag.magnitude().value()
                mag = "magnitude %.1f" % mag
            else:
                if len(evt.preferredMagnitudeID()) > 0:
                    logging.warning(
                        "unable to get magnitude %s, ignoring event "
                        "message" % evt.preferredMagnitudeID())
                else:
                    logging.warning(
                        "no preferred magnitude yet, ignoring event message")
                return

        # keep track of old events: an event already announced once is
        # never treated as new again
        if self._newWhenFirstSeen:
            if evt.publicID() in self._prevMessage:
                newEvent = False
            else:
                newEvent = True

        dsc = seismology.Regions.getRegionName(
            org.latitude().value(), org.longitude().value())

        # Optionally replace the region name by a pattern referencing the
        # nearest city (@region@/@dist@/@poi@ placeholders).
        if self._eventDescriptionPattern:
            try:
                city, dist, _ = self.nearestCity(
                    org.latitude().value(), org.longitude().value(),
                    self._citiesMaxDist, self._citiesMinPopulation)
                if city:
                    dsc = self._eventDescriptionPattern
                    region = seismology.Regions.getRegionName(
                        org.latitude().value(), org.longitude().value())
                    # NOTE: math here is seiscomp.math (deg2km helper)
                    distStr = str(int(math.deg2km(dist)))
                    dsc = dsc.replace("@region@", region).replace(
                        "@dist@", distStr).replace("@poi@", city.name())
            except BaseException:
                pass

        logging.debug("desc: %s" % dsc)

        dep = org.depth().value()
        now = core.Time.GMT()
        otm = org.time().value()

        # elapsed time since origin time, in seconds
        dt = (now - otm).seconds()

        # if dt > dtmax:
        #     return

        # Rebind dt to a human-readable "ago" phrase.
        if dt > 3600:
            dt = "%d hours %d minutes ago" % (dt/3600, (dt % 3600)/60)
        elif dt > 120:
            dt = "%d minutes ago" % (dt/60)
        else:
            dt = "%d seconds ago" % dt

        # The "%%s" survives this formatting step as "%s" so the "ago"
        # phrase can be filled in later.
        if preliminary:
            message = "earthquake, preliminary, %%s, %s" % dsc
        else:
            message = "earthquake, %%s, %s, %s, depth %d kilometers" % (
                dsc, mag, int(dep+0.5))
        # at this point the message lacks the "ago" part

        # Compare without the "ago" part so only substantive changes
        # produce a new announcement.
        if evt.publicID() in self._prevMessage and \
                self._prevMessage[evt.publicID()] == message:
            logging.info("Suppressing repeated message '%s'" % message)
            return

        self._prevMessage[evt.publicID()] = message
        message = message % dt  # fill the "ago" part
        logging.info(message)

        if not self._eventScript:
            return

        if self._eventProc is not None:
            if self._eventProc.poll() is None:
                logging.warning(
                    "EventScript still in progress -> skipping message")
                return

        try:
            # Script arguments: message, new-event flag, event ID,
            # associated phase count, magnitude value.
            param2 = 0
            param3 = 0
            param4 = ""
            if newEvent:
                param2 = 1

            org = self._cache.get(
                datamodel.Origin, evt.preferredOriginID())
            if org:
                try:
                    # quality may be unset and then raises
                    param3 = org.quality().associatedPhaseCount()
                except BaseException:
                    pass

            nmag = self._cache.get(
                datamodel.Magnitude, evt.preferredMagnitudeID())
            if nmag:
                param4 = "%.1f" % nmag.magnitude().value()

            self._eventProc = subprocess.Popen(
                [self._eventScript, message, "%d" % param2, evt.publicID(),
                 "%d" % param3, param4])
            logging.info(
                "Started event script with pid %d" % self._eventProc.pid)
        except BaseException:
            logging.error(
                "Failed to start event script '%s %s %d %d %s'" % (
                    self._eventScript, message, param2, param3, param4))
    except BaseException:
        info = traceback.format_exception(*sys.exc_info())
        for i in info:
            sys.stderr.write(i)
|
||||
|
||||
# Guard the entry point so importing this module has no side effects.
if __name__ == "__main__":
    app = VoiceAlert(len(sys.argv), sys.argv)
    sys.exit(app())
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,55 @@
|
||||
#!/bin/sh -e

# Launcher for the SeisComP control utility: resolves its own location,
# derives SEISCOMP_ROOT, exports the environment and dispatches to
# seiscomp-control.py (or to an arbitrary command via 'seiscomp exec').

# Resolve softlink to seiscomp executable first
if test -L "$0"
then
    # $0 is a link
    target="$(readlink "$0")"
    case "$target" in
    /*)
        d="$target"
        ;;
    *)
        d="$(dirname "$0")/$target"
        ;;
    esac
else
    # $0 is NOT a link
    case "$0" in
    */* | /*)
        d="$0"
        ;;
    *)
        d="$(command -v "$0")"
        ;;
    esac
fi

normalized_dirname() {
    # Normalize directory name without following symlinks.
    # Brute-force but portable.
    cd "${1%/*}" && pwd || exit 1
}

# Determine the root directory of the 'seiscomp' utility.
d="$(normalized_dirname "$d")"
SEISCOMP_ROOT="$(realpath "${d%/bin}")"

export SEISCOMP_ROOT
export PATH="$SEISCOMP_ROOT/bin:$PATH"
export LD_LIBRARY_PATH="$SEISCOMP_ROOT/lib:$LD_LIBRARY_PATH"
export PYTHONPATH="$SEISCOMP_ROOT/lib/python:$PYTHONPATH"
export MANPATH="$SEISCOMP_ROOT/share/man:$MANPATH"

# Source optional host-specific environment.
# BUGFIX: quote the path so installation directories or hostnames
# containing whitespace do not break the test/source commands.
HOSTENV="$SEISCOMP_ROOT/etc/env/by-hostname/$(hostname)"
test -f "$HOSTENV" && . "$HOSTENV"

case $1 in
exec)
    shift
    exec "$@"
    ;;
*)
    # Quote the interpreter path as well.
    exec "$SEISCOMP_ROOT/bin/seiscomp-python" "$SEISCOMP_ROOT/bin/seiscomp-control.py" "$@"
    ;;
esac
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,19 @@
|
||||
#!/bin/sh
#
# This is a shell script that executes the Python interpreter as
# configured using cmake.
#
# In order to use this in your Python programs use this
# shebang line:

#!/usr/bin/env seiscomp-python

# Please note that this wrapper does *not* set the environment
# variables for you. To ensure that you run your script in the
# proper environment, please use 'seiscomp exec'. Alternatively
# you can also set your environment variables according to the
# output of 'seiscomp print env'.

python_executable="/usr/bin/python3"

# BUGFIX: quote the interpreter path so a configured path containing
# whitespace does not undergo field splitting.
exec "$python_executable" "$@"
|
@ -0,0 +1,884 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
# -*- coding: utf-8 -*-
|
||||
############################################################################
|
||||
# Copyright (C) GFZ Potsdam #
|
||||
# All rights reserved. #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
# #
|
||||
# Author: Alexander Jaeger, Stephan Herrnkind, #
|
||||
# Lukas Lehmann, Dirk Roessler# #
|
||||
# Email: herrnkind@gempa.de #
|
||||
############################################################################
|
||||
|
||||
|
||||
import seiscomp.client, seiscomp.core, seiscomp.datamodel, seiscomp.io, seiscomp.logging, seiscomp.math
|
||||
from time import strptime
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
# Accepted time stamp formats of Seismic Handler event files; the first
# matching format wins (with and without fractional seconds).
TimeFormats = [
    '%d-%b-%Y_%H:%M:%S.%f',
    '%d-%b-%Y_%H:%M:%S'
]


# Mapping of Seismic Handler event type strings to SeisComP event types.
# SC3 has more event types available in the datamodel.
EventTypes = {
    'teleseismic quake': seiscomp.datamodel.EARTHQUAKE,
    'local quake': seiscomp.datamodel.EARTHQUAKE,
    'regional quake': seiscomp.datamodel.EARTHQUAKE,
    'quarry blast': seiscomp.datamodel.QUARRY_BLAST,
    'nuclear explosion': seiscomp.datamodel.NUCLEAR_EXPLOSION,
    'mining event': seiscomp.datamodel.MINING_EXPLOSION
}
|
||||
|
||||
|
||||
def wfs2Str(wfsID):
    """Format a WaveformStreamID as 'NET.STA.LOC.CHA'."""
    codes = (wfsID.networkCode(), wfsID.stationCode(),
             wfsID.locationCode(), wfsID.channelCode())
    return '%s.%s.%s.%s' % codes
||||
|
||||
|
||||
###############################################################################
|
||||
class SH2Proc(seiscomp.client.Application):
|
||||
|
||||
###########################################################################
|
||||
def __init__(self):
    """Set up the converter application: messaging and database access
    are enabled so inventory and bindings can be fetched; daemon mode is
    disabled since this is a one-shot conversion tool.
    """
    seiscomp.client.Application.__init__(self, len(sys.argv), sys.argv)
    self.setMessagingEnabled(True)
    self.setDatabaseEnabled(True, True)
    self.setLoadInventoryEnabled(True)
    self.setLoadConfigModuleEnabled(True)
    self.setDaemonEnabled(False)

    # input file name; '-' means read from stdin
    self.inputFile = '-'
||||
###########################################################################
|
||||
def initConfiguration(self):
    """Disable messaging (and possibly the database) when they are not
    needed for resolving inventory and configuration.
    """
    if not seiscomp.client.Application.initConfiguration(self):
        return False

    # Messaging is only used to obtain the configured database URI. If
    # the URI was passed directly, or the inventory comes from a file,
    # the connection is unnecessary.
    if self.databaseURI() != '':
        self.setMessagingEnabled(False)
    elif not self.isInventoryDatabaseEnabled():
        self.setMessagingEnabled(False)
        self.setDatabaseEnabled(False, False)

    return True
|
||||
##########################################################################
|
||||
def printUsage(self):
    """Print usage header, the inherited option list, and examples."""
    header = '''Usage:
  sh2proc [options]

Convert Seismic Handler event data to SeisComP XML format'''
    examples = '''Examples:
Convert the Seismic Handler file shm.evt to SCML. Receive the database
connection to read inventory and configuration information from messaging
  sh2proc shm.evt

Read Seismic Handler data from stdin. Provide inventory and configuration in XML
  cat shm.evt | sh2proc --inventory-db=inventory.xml --config-db=config.xml
'''
    print(header)
    # inherited option listing goes between header and examples
    seiscomp.client.Application.printUsage(self)
    print(examples)
||||
##########################################################################
|
||||
def validateParameters(self):
    """Pick the first non-option positional argument as the input file."""
    if not seiscomp.client.Application.validateParameters(self):
        return False

    for candidate in self.commandline().unrecognizedOptions():
        # skip anything that looks like an option flag
        looksLikeOption = len(candidate) > 1 and candidate.startswith('-')
        if looksLikeOption:
            continue
        self.inputFile = candidate
        break

    return True
|
||||
###########################################################################
|
||||
def loadStreams(self):
    """Populate self.streams: station code -> {component letter -> WaveformStreamID}.

    Preferred source are the per-station bindings (detecLocid/detecStream);
    if no configuration module is available, fall back to scanning the
    inventory and taking the first stream whose epoch matches 'now'.
    """
    now = seiscomp.core.Time.GMT()
    inv = seiscomp.client.Inventory.Instance()

    # station code -> {component code (last channel letter) -> stream ID}
    self.streams = {}

    # try to load streams by detecLocid and detecStream
    mod = self.configModule()
    if mod is not None and mod.configStationCount() > 0:
        seiscomp.logging.info('loading streams using detecLocid and detecStream')
        for i in range(mod.configStationCount()):
            cfg = mod.configStation(i)
            net = cfg.networkCode()
            sta = cfg.stationCode()
            if sta in self.streams:
                seiscomp.logging.warning(
                    'ambiguous stream id found for station %s.%s' % (net, sta))
                continue

            setup = seiscomp.datamodel.findSetup(cfg, self.name(), True)
            if not setup:
                seiscomp.logging.warning(
                    'could not find station setup for %s.%s' % (net, sta))
                continue

            params = seiscomp.datamodel.ParameterSet.Find(setup.parameterSetID())
            if not params:
                seiscomp.logging.warning(
                    'could not find station parameters for %s.%s' % (net, sta))
                continue

            detecLocid = ''
            detecStream = None

            # extract the detection stream/location from the binding
            for j in range(params.parameterCount()):
                param = params.parameter(j)
                if param.name() == 'detecStream':
                    detecStream = param.value()
                elif param.name() == 'detecLocid':
                    detecLocid = param.value()

            if detecStream is None:
                seiscomp.logging.warning(
                    'could not find detecStream for %s.%s' % (net, sta))
                continue

            loc = inv.getSensorLocation(net, sta, detecLocid, now)
            if loc is None:
                seiscomp.logging.warning(
                    'could not find preferred location for %s.%s' % (net, sta))
                continue

            # resolve the three components (Z plus two horizontals) of the
            # detection stream's band/instrument code
            components = {}
            tc = seiscomp.datamodel.ThreeComponents()
            seiscomp.datamodel.getThreeComponents(tc, loc, detecStream[:2], now)
            if tc.vertical():
                cha = tc.vertical()
                wfsID = seiscomp.datamodel.WaveformStreamID(net, sta, loc.code(),
                                                            cha.code(), '')
                components[cha.code()[-1]] = wfsID
                seiscomp.logging.debug('add stream %s (vertical)' % wfs2Str(wfsID))
            if tc.firstHorizontal():
                cha = tc.firstHorizontal()
                wfsID = seiscomp.datamodel.WaveformStreamID(net, sta, loc.code(),
                                                            cha.code(), '')
                components[cha.code()[-1]] = wfsID
                seiscomp.logging.debug('add stream %s (first horizontal)' % wfs2Str(wfsID))
            if tc.secondHorizontal():
                cha = tc.secondHorizontal()
                wfsID = seiscomp.datamodel.WaveformStreamID(net, sta, loc.code(),
                                                            cha.code(), '')
                components[cha.code()[-1]] = wfsID
                seiscomp.logging.debug('add stream %s (second horizontal)' % wfs2Str(wfsID))
            if len(components) > 0:
                self.streams[sta] = components

        return

    # fallback loading streams from inventory
    seiscomp.logging.warning(
        'no configuration module available, loading streams '
        'from inventory and selecting first available stream '
        'matching epoch')
    for iNet in range(inv.inventory().networkCount()):
        net = inv.inventory().network(iNet)
        seiscomp.logging.debug('network %s: loaded %i stations' % (net.code(), net.stationCount()))
        for iSta in range(net.stationCount()):
            sta = net.station(iSta)
            # skip stations whose epoch does not include 'now';
            # start()/end() raise when the attribute is unset
            try:
                start = sta.start()
                if not start <= now:
                    continue
            except:
                continue

            try:
                end = sta.end()
                if not now <= end:
                    continue
            except:
                # open-ended epoch
                pass

            for iLoc in range(sta.sensorLocationCount()):
                loc = sta.sensorLocation(iLoc)
                for iCha in range(loc.streamCount()):
                    cha = loc.stream(iCha)

                    wfsID = seiscomp.datamodel.WaveformStreamID(net.code(),
                                                                sta.code(), loc.code(), cha.code(), '')
                    comp = cha.code()[2]
                    if sta.code() not in self.streams:
                        components = {}
                        components[comp] = wfsID
                        self.streams[sta.code()] = components
                    else:
                        # Seismic Handler does not support network,
                        # location and channel code: make sure network and
                        # location codes match first item in station
                        # specific stream list
                        oldWfsID = list(self.streams[sta.code()].values())[0]
                        if net.code() != oldWfsID.networkCode() or \
                                loc.code() != oldWfsID.locationCode() or \
                                cha.code()[:2] != oldWfsID.channelCode()[:2]:
                            seiscomp.logging.warning(
                                'ambiguous stream id found for station %s, ignoring %s'
                                % (sta.code(), wfs2Str(wfsID)))
                            continue

                    self.streams[sta.code()][comp] = wfsID

                    seiscomp.logging.debug('add stream %s' % wfs2Str(wfsID))
||||
###########################################################################
|
||||
def parseTime(self, timeStr):
    """Parse a Seismic Handler time string, trying each supported format.

    Returns a seiscomp.core.Time; if no format matches, the returned
    object is whatever the last failed parse left behind (as before).
    """
    parsed = seiscomp.core.Time()
    for fmt in TimeFormats:
        if parsed.fromString(timeStr, fmt):
            return parsed
    return parsed
|
||||
###########################################################################
|
||||
def parseMagType(self, value):
    """Map a Seismic Handler magnitude code to the SeisComP magnitude
    type; returns '' for unknown codes.
    """
    mapping = {
        'm': 'M',
        'ml': 'ML',
        'mb': 'mb',
        'ms': 'Ms(BB)',
        'mw': 'Mw',
        'bb': 'mB',
    }
    return mapping.get(value, '')
|
||||
###########################################################################
|
||||
def sh2proc(self, file):
|
||||
ep = seiscomp.datamodel.EventParameters()
|
||||
origin = seiscomp.datamodel.Origin.Create()
|
||||
event = seiscomp.datamodel.Event.Create()
|
||||
|
||||
origin.setCreationInfo(seiscomp.datamodel.CreationInfo())
|
||||
origin.creationInfo().setCreationTime(seiscomp.core.Time.GMT())
|
||||
|
||||
originQuality = None
|
||||
originCE = None
|
||||
latFound = False
|
||||
lonFound = False
|
||||
depthError = None
|
||||
originComments = {}
|
||||
|
||||
# variables, reset after 'end of phase'
|
||||
pick = None
|
||||
stationMag = None
|
||||
staCode = None
|
||||
compCode = None
|
||||
stationMagBB = None
|
||||
|
||||
amplitudeDisp = None
|
||||
amplitudeVel = None
|
||||
amplitudeSNR = None
|
||||
amplitudeBB = None
|
||||
|
||||
magnitudeMB = None
|
||||
magnitudeML = None
|
||||
magnitudeMS = None
|
||||
magnitudeBB = None
|
||||
|
||||
km2degFac = 1.0 / seiscomp.math.deg2km(1.0)
|
||||
|
||||
# read file line by line, split key and value at colon
|
||||
iLine = 0
|
||||
for line in file:
|
||||
iLine += 1
|
||||
a = line.split(':', 1)
|
||||
key = a[0].strip()
|
||||
keyLower = key.lower()
|
||||
value = None
|
||||
|
||||
# empty line
|
||||
if len(keyLower) == 0:
|
||||
continue
|
||||
|
||||
# end of phase
|
||||
elif keyLower == '--- end of phase ---':
|
||||
if pick is None:
|
||||
seiscomp.logging.warning(
|
||||
'Line %i: found empty phase block' % iLine)
|
||||
continue
|
||||
|
||||
if staCode is None or compCode is None:
|
||||
seiscomp.logging.warning(
|
||||
'Line %i: end of phase, stream code incomplete' % iLine)
|
||||
continue
|
||||
|
||||
if not staCode in self.streams:
|
||||
seiscomp.logging.warning(
|
||||
'Line %i: end of phase, station code %s not found in inventory' % (iLine, staCode))
|
||||
continue
|
||||
|
||||
if not compCode in self.streams[staCode]:
|
||||
seiscomp.logging.warning(
|
||||
'Line %i: end of phase, component %s of station %s not found in inventory' % (iLine, compCode, staCode))
|
||||
continue
|
||||
|
||||
streamID = self.streams[staCode][compCode]
|
||||
|
||||
pick.setWaveformID(streamID)
|
||||
ep.add(pick)
|
||||
|
||||
arrival.setPickID(pick.publicID())
|
||||
arrival.setPhase(phase)
|
||||
origin.add(arrival)
|
||||
|
||||
if amplitudeSNR is not None:
|
||||
amplitudeSNR.setPickID(pick.publicID())
|
||||
amplitudeSNR.setWaveformID(streamID)
|
||||
ep.add(amplitudeSNR)
|
||||
|
||||
if amplitudeBB is not None:
|
||||
amplitudeBB.setPickID(pick.publicID())
|
||||
amplitudeBB.setWaveformID(streamID)
|
||||
ep.add(amplitudeBB)
|
||||
|
||||
if stationMagBB is not None:
|
||||
stationMagBB.setWaveformID(streamID)
|
||||
origin.add(stationMagBB)
|
||||
stationMagContrib = seiscomp.datamodel.StationMagnitudeContribution()
|
||||
stationMagContrib.setStationMagnitudeID(
|
||||
stationMagBB.publicID())
|
||||
if magnitudeBB is None:
|
||||
magnitudeBB = seiscomp.datamodel.Magnitude.Create()
|
||||
magnitudeBB.add(stationMagContrib)
|
||||
|
||||
if stationMag is not None:
|
||||
if stationMag.type() in ['mb', 'ML'] and amplitudeDisp is not None:
|
||||
amplitudeDisp.setPickID(pick.publicID())
|
||||
amplitudeDisp.setWaveformID(streamID)
|
||||
amplitudeDisp.setPeriod(
|
||||
seiscomp.datamodel.RealQuantity(ampPeriod))
|
||||
amplitudeDisp.setType(stationMag.type())
|
||||
ep.add(amplitudeDisp)
|
||||
|
||||
if stationMag.type() in ['Ms(BB)'] and amplitudeVel is not None:
|
||||
amplitudeVel.setPickID(pick.publicID())
|
||||
amplitudeVel.setWaveformID(streamID)
|
||||
amplitudeVel.setPeriod(
|
||||
seiscomp.datamodel.RealQuantity(ampPeriod))
|
||||
amplitudeVel.setType(stationMag.type())
|
||||
ep.add(amplitudeVel)
|
||||
|
||||
stationMag.setWaveformID(streamID)
|
||||
origin.add(stationMag)
|
||||
|
||||
stationMagContrib = seiscomp.datamodel.StationMagnitudeContribution()
|
||||
stationMagContrib.setStationMagnitudeID(
|
||||
stationMag.publicID())
|
||||
|
||||
magType = stationMag.type()
|
||||
if magType == 'ML':
|
||||
if magnitudeML is None:
|
||||
magnitudeML = seiscomp.datamodel.Magnitude.Create()
|
||||
magnitudeML.add(stationMagContrib)
|
||||
|
||||
elif magType == 'Ms(BB)':
|
||||
if magnitudeMS is None:
|
||||
magnitudeMS = seiscomp.datamodel.Magnitude.Create()
|
||||
magnitudeMS.add(stationMagContrib)
|
||||
|
||||
elif magType == 'mb':
|
||||
if magnitudeMB is None:
|
||||
magnitudeMB = seiscomp.datamodel.Magnitude.Create()
|
||||
magnitudeMB.add(stationMagContrib)
|
||||
|
||||
pick = None
|
||||
staCode = None
|
||||
compCode = None
|
||||
stationMag = None
|
||||
stationMagBB = None
|
||||
amplitudeDisp = None
|
||||
amplitudeVel = None
|
||||
amplitudeSNR = None
|
||||
amplitudeBB = None
|
||||
continue
|
||||
|
||||
# empty key
|
||||
elif len(a) == 1:
|
||||
seiscomp.logging.warning('Line %i: key without value' % iLine)
|
||||
continue
|
||||
|
||||
value = a[1].strip()
|
||||
if pick is None:
|
||||
pick = seiscomp.datamodel.Pick.Create()
|
||||
arrival = seiscomp.datamodel.Arrival()
|
||||
|
||||
try:
|
||||
##############################################################
|
||||
# station parameters
|
||||
|
||||
# station code
|
||||
if keyLower == 'station code':
|
||||
staCode = value
|
||||
|
||||
# pick time
|
||||
elif keyLower == 'onset time':
|
||||
pick.setTime(seiscomp.datamodel.TimeQuantity(self.parseTime(value)))
|
||||
|
||||
# pick onset type
|
||||
elif keyLower == 'onset type':
|
||||
found = False
|
||||
for onset in [seiscomp.datamodel.EMERGENT, seiscomp.datamodel.IMPULSIVE,
|
||||
seiscomp.datamodel.QUESTIONABLE]:
|
||||
if value == seiscomp.datamodel.EPickOnsetNames_name(onset):
|
||||
pick.setOnset(onset)
|
||||
found = True
|
||||
break
|
||||
if not found:
|
||||
raise Exception('Unsupported onset value')
|
||||
|
||||
# phase code
|
||||
elif keyLower == 'phase name':
|
||||
phase = seiscomp.datamodel.Phase()
|
||||
phase.setCode(value)
|
||||
pick.setPhaseHint(phase)
|
||||
|
||||
# event type
|
||||
elif keyLower == 'event type':
|
||||
evttype = EventTypes[value]
|
||||
event.setType(evttype)
|
||||
originComments[key] = value
|
||||
|
||||
# filter ID
|
||||
elif keyLower == 'applied filter':
|
||||
pick.setFilterID(value)
|
||||
|
||||
# channel code, prepended by configured Channel prefix if only
|
||||
# one character is found
|
||||
elif keyLower == 'component':
|
||||
compCode = value
|
||||
|
||||
# pick evaluation mode
|
||||
elif keyLower == 'pick type':
|
||||
found = False
|
||||
for mode in [seiscomp.datamodel.AUTOMATIC, seiscomp.datamodel.MANUAL]:
|
||||
if value == seiscomp.datamodel.EEvaluationModeNames_name(mode):
|
||||
pick.setEvaluationMode(mode)
|
||||
found = True
|
||||
break
|
||||
if not found:
|
||||
raise Exception('Unsupported evaluation mode value')
|
||||
|
||||
# pick author
|
||||
elif keyLower == 'analyst':
|
||||
creationInfo = seiscomp.datamodel.CreationInfo()
|
||||
creationInfo.setAuthor(value)
|
||||
pick.setCreationInfo(creationInfo)
|
||||
|
||||
# pick polarity
|
||||
# isn't tested
|
||||
elif keyLower == 'sign':
|
||||
if value == 'positive':
|
||||
sign = '0' # positive
|
||||
elif value == 'negative':
|
||||
sign = '1' # negative
|
||||
else:
|
||||
sign = '2' # unknown
|
||||
pick.setPolarity(float(sign))
|
||||
|
||||
# arrival weight
|
||||
elif keyLower == 'weight':
|
||||
arrival.setWeight(float(value))
|
||||
|
||||
# arrival azimuth
|
||||
elif keyLower == 'theo. azimuth (deg)':
|
||||
arrival.setAzimuth(float(value))
|
||||
|
||||
# pick theo backazimuth
|
||||
elif keyLower == 'theo. backazimuth (deg)':
|
||||
if pick.slownessMethodID() == 'corrected':
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
else:
|
||||
pick.setBackazimuth(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
pick.setSlownessMethodID('theoretical')
|
||||
|
||||
# pick beam slowness
|
||||
elif keyLower == 'beam-slowness (sec/deg)':
|
||||
if pick.slownessMethodID() == 'corrected':
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
else:
|
||||
pick.setHorizontalSlowness(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
pick.setSlownessMethodID('Array Beam')
|
||||
|
||||
# pick beam backazimuth
|
||||
elif keyLower == 'beam-azimuth (deg)':
|
||||
if pick.slownessMethodID() == 'corrected':
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
else:
|
||||
pick.setBackazimuth(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
|
||||
# pick epi slowness
|
||||
elif keyLower == 'epi-slowness (sec/deg)':
|
||||
pick.setHorizontalSlowness(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
pick.setSlownessMethodID('corrected')
|
||||
|
||||
# pick epi backazimuth
|
||||
elif keyLower == 'epi-azimuth (deg)':
|
||||
pick.setBackazimuth(seiscomp.datamodel.RealQuantity(float(value)))
|
||||
|
||||
# arrival distance degree
|
||||
elif keyLower == 'distance (deg)':
|
||||
arrival.setDistance(float(value))
|
||||
|
||||
# arrival distance km, recalculates for degree
|
||||
elif keyLower == 'distance (km)':
|
||||
if isinstance(arrival.distance(), float):
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine-1, 'distance (deg)'))
|
||||
arrival.setDistance(float(value) * km2degFac)
|
||||
|
||||
# arrival time residual
|
||||
elif keyLower == 'residual time':
|
||||
arrival.setTimeResidual(float(value))
|
||||
|
||||
# amplitude snr
|
||||
elif keyLower == 'signal/noise':
|
||||
amplitudeSNR = seiscomp.datamodel.Amplitude.Create()
|
||||
amplitudeSNR.setType('SNR')
|
||||
amplitudeSNR.setAmplitude(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
|
||||
# amplitude period
|
||||
elif keyLower.startswith('period'):
|
||||
ampPeriod = float(value)
|
||||
|
||||
# amplitude value for displacement
|
||||
elif keyLower == 'amplitude (nm)':
|
||||
amplitudeDisp = seiscomp.datamodel.Amplitude.Create()
|
||||
amplitudeDisp.setAmplitude(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
amplitudeDisp.setUnit('nm')
|
||||
|
||||
# amplitude value for velocity
|
||||
elif keyLower.startswith('vel. amplitude'):
|
||||
amplitudeVel = seiscomp.datamodel.Amplitude.Create()
|
||||
amplitudeVel.setAmplitude(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
amplitudeVel.setUnit('nm/s')
|
||||
|
||||
elif keyLower == 'bb amplitude (nm/sec)':
|
||||
amplitudeBB = seiscomp.datamodel.Amplitude.Create()
|
||||
amplitudeBB.setAmplitude(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
amplitudeBB.setType('mB')
|
||||
amplitudeBB.setUnit('nm/s')
|
||||
amplitudeBB.setPeriod(seiscomp.datamodel.RealQuantity(ampBBPeriod))
|
||||
|
||||
elif keyLower == 'bb period (sec)':
|
||||
ampBBPeriod = float(value)
|
||||
|
||||
elif keyLower == 'broadband magnitude':
|
||||
magType = self.parseMagType('bb')
|
||||
stationMagBB = seiscomp.datamodel.StationMagnitude.Create()
|
||||
stationMagBB.setMagnitude(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
stationMagBB.setType(magType)
|
||||
stationMagBB.setAmplitudeID(amplitudeBB.publicID())
|
||||
|
||||
# ignored
|
||||
elif keyLower == 'quality number':
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
|
||||
# station magnitude value and type
|
||||
elif keyLower.startswith('magnitude '):
|
||||
magType = self.parseMagType(key[10:])
|
||||
stationMag = seiscomp.datamodel.StationMagnitude.Create()
|
||||
stationMag.setMagnitude(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
|
||||
if len(magType) > 0:
|
||||
stationMag.setType(magType)
|
||||
if magType == 'mb':
|
||||
stationMag.setAmplitudeID(amplitudeDisp.publicID())
|
||||
|
||||
elif magType == 'MS(BB)':
|
||||
stationMag.setAmplitudeID(amplitudeVel.publicID())
|
||||
else:
|
||||
seiscomp.logging.debug('Line %i: Magnitude Type not known %s.' % (iLine, magType))
|
||||
|
||||
###############################################################
|
||||
# origin parameters
|
||||
|
||||
# event ID, added as origin comment later on
|
||||
elif keyLower == 'event id':
|
||||
originComments[key] = value
|
||||
|
||||
# magnitude value and type
|
||||
elif keyLower == 'mean bb magnitude':
|
||||
magType = self.parseMagType('bb')
|
||||
if magnitudeBB is None:
|
||||
magnitudeBB = seiscomp.datamodel.Magnitude.Create()
|
||||
magnitudeBB.setMagnitude(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
magnitudeBB.setType(magType)
|
||||
|
||||
elif keyLower.startswith('mean magnitude '):
|
||||
magType = self.parseMagType(key[15:])
|
||||
|
||||
if magType == 'ML':
|
||||
if magnitudeML is None:
|
||||
magnitudeML = seiscomp.datamodel.Magnitude.Create()
|
||||
magnitudeML.setMagnitude(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
magnitudeML.setType(magType)
|
||||
|
||||
elif magType == 'Ms(BB)':
|
||||
if magnitudeMS is None:
|
||||
magnitudeMS = seiscomp.datamodel.Magnitude.Create()
|
||||
magnitudeMS.setMagnitude(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
magnitudeMS.setType(magType)
|
||||
|
||||
elif magType == 'mb':
|
||||
if magnitudeMB is None:
|
||||
magnitudeMB = seiscomp.datamodel.Magnitude.Create()
|
||||
magnitudeMB.setMagnitude(
|
||||
seiscomp.datamodel.RealQuantity(float(value)))
|
||||
magnitudeMB.setType(magType)
|
||||
|
||||
else:
|
||||
seiscomp.logging.warning('Line %i: Magnitude type %s not defined yet.' % (iLine, magType))
|
||||
|
||||
# latitude
|
||||
elif keyLower == 'latitude':
|
||||
origin.latitude().setValue(float(value))
|
||||
latFound = True
|
||||
elif keyLower == 'error in latitude (km)':
|
||||
origin.latitude().setUncertainty(float(value))
|
||||
|
||||
# longitude
|
||||
elif keyLower == 'longitude':
|
||||
origin.longitude().setValue(float(value))
|
||||
lonFound = True
|
||||
elif keyLower == 'error in longitude (km)':
|
||||
origin.longitude().setUncertainty(float(value))
|
||||
|
||||
# depth
|
||||
elif keyLower == 'depth (km)':
|
||||
origin.setDepth(seiscomp.datamodel.RealQuantity(float(value)))
|
||||
if depthError is not None:
|
||||
origin.depth().setUncertainty(depthError)
|
||||
elif keyLower == 'depth type':
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
elif keyLower == 'error in depth (km)':
|
||||
depthError = float(value)
|
||||
try:
|
||||
origin.depth().setUncertainty(depthError)
|
||||
except seiscomp.core.ValueException:
|
||||
pass
|
||||
|
||||
# time
|
||||
elif keyLower == 'origin time':
|
||||
origin.time().setValue(self.parseTime(value))
|
||||
elif keyLower == 'error in origin time':
|
||||
origin.time().setUncertainty(float(value))
|
||||
|
||||
# location method
|
||||
elif keyLower == 'location method':
|
||||
origin.setMethodID(str(value))
|
||||
|
||||
# region table, added as origin comment later on
|
||||
elif keyLower == 'region table':
|
||||
originComments[key] = value
|
||||
|
||||
# region table, added as origin comment later on
|
||||
elif keyLower == 'region id':
|
||||
originComments[key] = value
|
||||
|
||||
# source region, added as origin comment later on
|
||||
elif keyLower == 'source region':
|
||||
originComments[key] = value
|
||||
|
||||
# used station count
|
||||
elif keyLower == 'no. of stations used':
|
||||
if originQuality is None:
|
||||
originQuality = seiscomp.datamodel.OriginQuality()
|
||||
originQuality.setUsedStationCount(int(value))
|
||||
|
||||
# ignored
|
||||
elif keyLower == 'reference location name':
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
|
||||
# confidence ellipsoid major axis
|
||||
elif keyLower == 'error ellipse major':
|
||||
if originCE is None:
|
||||
originCE = seiscomp.datamodel.ConfidenceEllipsoid()
|
||||
originCE.setSemiMajorAxisLength(float(value))
|
||||
|
||||
# confidence ellipsoid minor axis
|
||||
elif keyLower == 'error ellipse minor':
|
||||
if originCE is None:
|
||||
originCE = seiscomp.datamodel.ConfidenceEllipsoid()
|
||||
originCE.setSemiMinorAxisLength(float(value))
|
||||
|
||||
# confidence ellipsoid rotation
|
||||
elif keyLower == 'error ellipse strike':
|
||||
if originCE is None:
|
||||
originCE = seiscomp.datamodel.ConfidenceEllipsoid()
|
||||
originCE.setMajorAxisRotation(float(value))
|
||||
|
||||
# azimuthal gap
|
||||
elif keyLower == 'max azimuthal gap (deg)':
|
||||
if originQuality is None:
|
||||
originQuality = seiscomp.datamodel.OriginQuality()
|
||||
originQuality.setAzimuthalGap(float(value))
|
||||
|
||||
# creation info author
|
||||
elif keyLower == 'author':
|
||||
origin.creationInfo().setAuthor(value)
|
||||
|
||||
# creation info agency
|
||||
elif keyLower == 'source of information':
|
||||
origin.creationInfo().setAgencyID(value)
|
||||
|
||||
# earth model id
|
||||
elif keyLower == 'velocity model':
|
||||
origin.setEarthModelID(value)
|
||||
|
||||
# standard error
|
||||
elif keyLower == 'rms of residuals (sec)':
|
||||
if originQuality is None:
|
||||
originQuality = seiscomp.datamodel.OriginQuality()
|
||||
originQuality.setStandardError(float(value))
|
||||
|
||||
# ignored
|
||||
elif keyLower == 'phase flags':
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
|
||||
# ignored
|
||||
elif keyLower == 'location input params':
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
|
||||
# missing keys
|
||||
elif keyLower == 'ampl&period source':
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
|
||||
elif keyLower == 'location quality':
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
|
||||
elif keyLower == 'reference latitude':
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
|
||||
elif keyLower == 'reference longitude':
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
|
||||
elif keyLower.startswith('amplitude time'):
|
||||
seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key))
|
||||
|
||||
# unknown key
|
||||
else:
|
||||
seiscomp.logging.warning('Line %i: ignoring unknown parameter: %s' % (iLine, key))
|
||||
|
||||
except ValueError as ve:
|
||||
seiscomp.logging.warning('Line %i: can not parse %s value' % (iLine, key))
|
||||
except Exception:
|
||||
seiscomp.logging.error('Line %i: %s' % (iLine, str(traceback.format_exc())))
|
||||
return None
|
||||
|
||||
# check
|
||||
if not latFound:
|
||||
seiscomp.logging.warning('could not add origin, missing latitude parameter')
|
||||
elif not lonFound:
|
||||
seiscomp.logging.warning('could not add origin, missing longitude parameter')
|
||||
elif not origin.time().value().valid():
|
||||
seiscomp.logging.warning('could not add origin, missing origin time parameter')
|
||||
else:
|
||||
if magnitudeMB is not None:
|
||||
origin.add(magnitudeMB)
|
||||
if magnitudeML is not None:
|
||||
origin.add(magnitudeML)
|
||||
if magnitudeMS is not None:
|
||||
origin.add(magnitudeMS)
|
||||
if magnitudeBB is not None:
|
||||
origin.add(magnitudeBB)
|
||||
|
||||
ep.add(event)
|
||||
ep.add(origin)
|
||||
|
||||
if originQuality is not None:
|
||||
origin.setQuality(originQuality)
|
||||
|
||||
if originCE is not None:
|
||||
uncertainty = seiscomp.datamodel.OriginUncertainty()
|
||||
uncertainty.setConfidenceEllipsoid(originCE)
|
||||
origin.setUncertainty(uncertainty)
|
||||
|
||||
for k, v in originComments.items():
|
||||
comment = seiscomp.datamodel.Comment()
|
||||
comment.setId(k)
|
||||
comment.setText(v)
|
||||
origin.add(comment)
|
||||
|
||||
return ep
|
||||
|
||||
###########################################################################
|
||||
def run(self):
|
||||
self.loadStreams()
|
||||
|
||||
try:
|
||||
if self.inputFile == '-':
|
||||
f = sys.stdin
|
||||
else:
|
||||
f = open(self.inputFile)
|
||||
except IOError as e:
|
||||
seiscomp.logging.error(str(e))
|
||||
return False
|
||||
|
||||
ep = self.sh2proc(f)
|
||||
if ep is None:
|
||||
return False
|
||||
|
||||
ar = seiscomp.io.XMLArchive()
|
||||
ar.create('-')
|
||||
ar.setFormattedOutput(True)
|
||||
ar.writeObject(ep)
|
||||
ar.close()
|
||||
|
||||
return True
|
||||
|
||||
|
||||
###############################################################################
|
||||
def main():
    """Instantiate and run the application, mapping failures to exit code 1.

    Fix: the original bare ``except:`` also swallowed SystemExit and
    KeyboardInterrupt; catch only ``Exception`` so normal termination
    signals still propagate.
    """
    try:
        app = SH2Proc()
        return app()
    except Exception:
        # report the full traceback but still exit with a failure code
        sys.stderr.write(str(traceback.format_exc()))

    return 1
|
||||
|
||||
|
||||
# Script entry point: exit with the converter's return code.
if __name__ == '__main__':
    sys.exit(main())


# vim: ts=4 et
|
Binary file not shown.
Binary file not shown.
@ -0,0 +1,483 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
|
||||
from __future__ import print_function
|
||||
from getopt import getopt, GetoptError
|
||||
from time import time, gmtime
|
||||
from datetime import datetime
|
||||
import os, sys, signal, glob, re
|
||||
from seiscomp.myconfig import MyConfig
|
||||
import seiscomp.slclient
|
||||
import seiscomp.kernel, seiscomp.config
|
||||
|
||||
usage_info = """
|
||||
Usage:
|
||||
slmon [options]
|
||||
|
||||
SeedLink monitor creating static web pages
|
||||
|
||||
Options:
|
||||
-h, --help display this help message
|
||||
-c ini_setup = arg
|
||||
-s ini_stations = arg
|
||||
-t refresh = float(arg) # XXX not yet used
|
||||
-v verbose = 1
|
||||
|
||||
Examples:
|
||||
Start slmon from the command line
|
||||
slmon -c $SEISCOMP_ROOT/var/lib/slmon/config.ini
|
||||
|
||||
Restart slmon in order to update the web pages. Use crontab entries for
|
||||
automatic restart, e.g.:
|
||||
*/3 * * * * /home/sysop/seiscomp/bin/seiscomp check slmon >/dev/null 2>&1
|
||||
"""
|
||||
|
||||
def usage(exitcode=0):
    """Print the usage text to stderr and terminate with *exitcode*.

    Fix: use sys.exit() instead of the site-module ``exit()`` helper,
    which is not guaranteed to exist (e.g. when running with ``-S``).
    """
    sys.stderr.write(usage_info)
    sys.exit(exitcode)
|
||||
|
||||
# Resolve the SeisComP installation root; the monitor cannot run without it.
try:
    seiscompRoot=os.environ["SEISCOMP_ROOT"]
# NOTE(review): bare except also hides non-KeyError failures — intentional?
except:
    print("\nSEISCOMP_ROOT must be defined - EXIT\n", file=sys.stderr)
    usage(exitcode=2)

# Default configuration file locations (overridable via -s / -c).
ini_stations = os.path.join(seiscompRoot,'var/lib/slmon/stations.ini')
ini_setup = os.path.join(seiscompRoot,'var/lib/slmon/config.ini')

# Matches seismic channel codes of interest, e.g. "BHZ", "HHN", "EHE".
regexStreams = re.compile("[SLBVEH][HNLG][ZNE123]")

# Verbosity flag, switched on by -v.
verbose = 0
|
||||
|
||||
class Module(seiscomp.kernel.Module):
    """SeisComP kernel integration for the slmon module."""

    def __init__(self, env):
        # register under the module name derived from this file
        super().__init__(env, env.moduleName(__file__))

    def printCrontab(self):
        """Print the crontab line that keeps slmon alive."""
        line = "3 * * * * %s/bin/seiscomp check slmon >/dev/null 2>&1"
        print(line % (self.env.SEISCOMP_ROOT))
|
||||
|
||||
class Status:
    """Latency record for one stream: net/sta/loc/cha/type plus the
    timestamps of the last data sample and the last feed."""

    def __repr__(self):
        # Fixed-width, sortable one-line representation.
        ids = (self.net, self.sta, self.loc, self.cha, self.typ)
        stamps = (str(self.last_data), str(self.last_feed))
        return "{:>2} {:<5} {:>2} {:>3} {:>1} {} {}".format(*(ids + stamps))
|
||||
class StatusDict(dict):
    """Dictionary of Status records keyed by a stream identifier.

    Fix: the original used the Python-2-only ``file`` builtin in read()
    and write(), which raises NameError under Python 3; replaced by
    ``open`` and duck-typed file detection.
    """

    def __init__(self, source=None):
        """Optionally initialize from *source* (path, file object or lines)."""
        if source:
            self.read(source)

    def fromSlinkTool(self, server="", stations=("GE_MALT", "GE_MORC", "GE_IBBN")):
        """Populate the dict by querying *server* via the slinktool binary.

        Only data records ("D") of the given *stations* whose channel code
        matches regexStreams are kept.
        """
        # later this shall use XML
        cmd = "slinktool -nd 10 -nt 10 -Q %s" % server
        print(cmd)
        f = os.popen(cmd)
        regex = regexStreams
        for line in f:
            net_sta = line[:2].strip() + "_" + line[3:8].strip()
            if net_sta not in stations:
                continue
            typ = line[16]
            if typ != "D":
                continue
            cha = line[12:15].strip()
            if not regex.match(cha):
                continue

            # fixed-column slinktool output; columns 47:70 hold the end time
            d = Status()
            d.net = line[0:2].strip()
            d.sta = line[3:8].strip()
            d.loc = line[9:11].strip()
            d.cha = line[12:15]
            d.typ = line[16]
            d.last_data = seiscomp.slclient.timeparse(line[47:70])
            d.last_feed = d.last_data
            sec = "%s.%s.%s.%s.%c" % (d.net, d.sta, d.loc, d.cha, d.typ)
            self[sec] = d

    def read(self, source):
        """Load records from *source*: a path, a file-like object or a
        list of fixed-width status lines (as produced by write())."""
        if isinstance(source, str):
            source = open(source)
        if hasattr(source, "readlines"):
            source = source.readlines()
        if not isinstance(source, list):
            raise TypeError('cannot read from %s' % str(type(source)))

        for line in source:
            d = Status()
            d.net = line[0:2]
            d.sta = line[3:8].strip()
            d.loc = line[9:11].strip()
            d.cha = line[12:15]
            d.typ = line[16]
            d.last_data = seiscomp.slclient.timeparse(line[18:41])
            d.last_feed = seiscomp.slclient.timeparse(line[42:65])
            if d.last_feed < d.last_data:
                d.last_feed = d.last_data
            # NOTE(review): this key format ("NET_STA:LOC.CHA.T") differs
            # from the one used by fromSlinkTool — confirm intended.
            sec = "%s_%s:%s.%s.%c" % (d.net, d.sta, d.loc, d.cha, d.typ)
            self[sec] = d

    def write(self, f):
        """Write all records, sorted, one per line, to *f* (path or file)."""
        if isinstance(f, str):
            f = open(f, "w")
        lines = sorted(str(self[key]) for key in self.keys())
        f.write('\n'.join(lines) + '\n')
|
||||
|
||||
|
||||
def colorLegend(htmlfile):
    """Write the latency color-legend table to *htmlfile*."""
    # (background color, latency label) in display order
    cells = (
        ("#cc99ff", "<30 m"),
        ("#3399ff", "< 1 h"),
        ("#00ff00", "< 2 h"),
        ("#ffff00", "< 6 h"),
        ("#ff9966", "< 1 d"),
        ("#ff3333", "< 2 d"),
        ("#ffcccc", "< 3 d"),
        ("#cccccc", "< 4 d"),
        ("#999999", "< 5 d"),
        ("#666666", "> 5 d"),
    )
    parts = ["<p><center>Latencies:<br>\n"
             "<table cellpadding='2' cellspacing='1' border='0'"
             " bgcolor='#000000'>\n<tr>\n"]
    for color, label in cells:
        parts.append("<td bgcolor='%s'> %s </td>\n" % (color, label))
    parts.append("</tr>\n</table>\n</center></p>\n")
    htmlfile.write("".join(parts))
|
||||
|
||||
# encodes an email address so that it cannot (easily) be extracted
|
||||
# from the web page. This is meant to be a spam protection.
|
||||
def encode(txt):
    """Obfuscate *txt* (an email address) as HTML numeric character
    references so it cannot (easily) be scraped from the page."""
    return "".join("&#%d;" % ord(ch) for ch in txt)
|
||||
|
||||
def total_seconds(td):
    """Whole seconds in timedelta *td* (microseconds are ignored)."""
    return td.days * 86400 + td.seconds
|
||||
|
||||
def pageTrailer(htmlfile, config):
    """Write the common page footer: UTC timestamp plus the configured link."""
    year, month, day, hour, minute, second = gmtime()[:6]
    stamp = "%04d/%02d/%02d %02d:%02d:%02d" % (
        year, month, day, hour, minute, second)
    htmlfile.write(
        "<hr>\n"
        "<table width='99%' cellpaddding='2' cellspacing='1' border='0'>\n"
        "<tr>\n<td>Last updated " + stamp + " UTC</td>\n"
        " <td align='right'><a href='" + config['setup']['linkurl'] +
        "' target='_top'>" + config['setup']['linkname'] +
        "</a></td>\n</tr>\n"
        "</table>\n</body></html>\n")
|
||||
|
||||
def getColor(delta):
    """Map latency timedelta *delta* to the legend background color."""
    delay = total_seconds(delta)
    # descending thresholds in seconds, matching the legend in colorLegend()
    thresholds = (
        (432000, '#666666'),   # > 5 d
        (345600, '#999999'),   # < 5 d
        (259200, '#cccccc'),   # < 4 d
        (172800, '#ffcccc'),   # < 3 d
        (86400,  '#ff3333'),   # < 2 d
        (21600,  '#ff9966'),   # < 1 d
        (7200,   '#ffff00'),   # < 6 h
        (3600,   '#00ff00'),   # < 2 h
        (1800,   '#3399ff'),   # < 1 h
    )
    for limit, color in thresholds:
        if delay > limit:
            return color
    return '#cc99ff'           # < 30 m
|
||||
|
||||
# HTML table cell used by TDf()/TDt() when a value is not available.
TDdummy = "<td align='center' bgcolor='%s'><tt>n/a</tt></td>"
|
||||
|
||||
def TDf(delta, col="#ffffff"):
    """Render latency timedelta *delta* as a right-aligned HTML cell.

    None yields the shared 'n/a' placeholder cell.
    """
    if delta is None:
        return TDdummy % col

    secs = total_seconds(delta)

    # pick the largest unit that keeps the number readable
    if secs > 86400:
        text = "%.1f d" % (secs / 86400.)
    elif secs > 7200:
        text = "%.1f h" % (secs / 3600.)
    elif secs > 120:
        text = "%.1f m" % (secs / 60.)
    else:
        text = "%.1f s" % secs
    return "<td align='right' bgcolor='%s'><tt> %s </tt></td>" % (col, text)
|
||||
|
||||
def TDt(t, col="#ffffff"):
    """Render timestamp *t* (datetime or None) as a centered HTML cell."""
    if t is None:
        return TDdummy % col

    stamp = t.strftime("%Y/%m/%d %H:%M:%S")
    return "<td align='center' bgcolor='%s'><tt> %s </tt></td>" % (col, stamp)
|
||||
|
||||
def myrename(name1, name2):
    """Rename *name1* to *name2* without raising on failure.

    Fault-tolerant: an OSError (e.g. target on an unreachable NFS mount)
    is reported to stderr instead of propagating.
    """
    try:
        os.rename(name1, name2)
    except OSError:
        sys.stderr.write("failed to rename(%s,%s)\n" % (name1, name2))
|
||||
|
||||
|
||||
def makeMainHTML(config):
    """Generate the overview page (index.html) listing per-station latencies.

    Reads the module-global *status* dict, writes one table row per station
    (real-time and dial-up stations in separate tables), regenerates each
    station detail page, then atomically replaces index.html via myrename().
    """
    global status

    now = datetime.utcnow()

    stations = []

    # only streams whose channel code matches the monitored pattern
    streams = [ x for x in list(status.keys()) if regexStreams.search(x) ]

    streams.sort()

    tmp_rt = []   # rows for real-time stations
    tmp_du = []   # rows for dial-up stations

    for label in streams:
        lat1 = now - status[label].last_data # XXX data latency
        lat2 = now - status[label].last_feed # XXX feed latency
        lat3 = lat1-lat2 # XXX difference (feed minus data age)
        # NOTE(review): lat3 is a timedelta, so comparing it to the float
        # 0. is always False in Python 3 — this branch never fires; was
        # presumably meant as `lat3 == timedelta(0)`. TODO confirm.
        if lat3 == 0.: lat3 = lat2 = None

        # strip a trailing ".D"/".E" record-type suffix from the key
        if label[-2]=='.' and label[-1] in "DE":
            label = label[:-2]
        n,s,x,x = label.split(".")
        if s in stations: continue # avoid duplicates for different locations
        stations.append(s)

        net_sta = "%s_%s" % (n,s)
        line = "<tr bgcolor='#ffffff'><td><tt> %s <a " \
               "href='%s.html'>%s</a> </td>%s%s%s</tr>" \
               % (n, net_sta, s, TDf(lat1, getColor(lat1)),
                  TDf(lat2, getColor(lat2)),
                  TDf(lat3, getColor(lat3)))
        if config.station[net_sta]['type'][:4] == 'real':
            tmp_rt.append(line)
        else: tmp_du.append(line)
        # regenerate the per-station detail page as a side effect
        makeStatHTML(net_sta, config)

    # make sure the output directory exists; ignore "already exists"
    try: os.makedirs(config['setup']['wwwdir'])
    except: pass

    # write to a temp file first, then rename for an atomic update
    temp = "%s/tmp.html" % config['setup']['wwwdir']
    dest = "%s/index.html" % config['setup']['wwwdir']

    table_begin = """
<table cellpaddding='2' cellspacing='1' border='0' bgcolor='#000000'>
<tr>
<th bgcolor='#ffffff' rowspan='2' align='center'>Station</th>
<th bgcolor='#ffffff' colspan='3' align='center'>Latencies</th>
</tr>
<tr>
<th bgcolor='#ffffff' align='center'>Data</th>
<th bgcolor='#ffffff' align='center'>Feed</th>
<th bgcolor='#ffffff' align='center'>Diff.</th>
</tr>
"""
    table_end = """
</table>
"""

    htmlfile = open(temp, "w")
    htmlfile.write("""<html>
<head>
<title>%s</title>
<meta http-equiv='refresh' content='%d'>
<link rel='SHORTCUT ICON' href='%s'>
</head>
<body bgcolor='#ffffff'>
<center><font size='+2'>%s</font></center>\n""" % \
        ( config['setup']['title'], int(config['setup']['refresh']),
          config['setup']['icon'], config['setup']['title']))

    # two-column layout: real-time stations left, dial-up stations right
    htmlfile.write("<center><table cellpaddding='5' cellspacing='5'><tr>\n")
    if len(tmp_rt):
        htmlfile.write("<td valign='top' align='center'>\n" \
            "<font size='+1'>Real-time stations<font>\n</td>\n")
    if len(tmp_du):
        htmlfile.write("<td valign='top' align='center'>\n" \
            "<font size='+1'>Dial-up stations<font>\n</td>\n")
    htmlfile.write("</tr><tr>")
    if len(tmp_rt):
        htmlfile.write("<td valign='top' align='center'>\n")
        htmlfile.write(table_begin)
        htmlfile.write("\n".join(tmp_rt))
        htmlfile.write(table_end)
        htmlfile.write("</td>\n")
    if len(tmp_du):
        htmlfile.write("<td valign='top' align='center'>\n")
        htmlfile.write(table_begin)
        htmlfile.write("\n".join(tmp_du))
        htmlfile.write(table_end)
        htmlfile.write("</td>\n")
    htmlfile.write("</tr></table></center>\n")

    colorLegend(htmlfile)
    pageTrailer(htmlfile, config)
    htmlfile.close()
    myrename(temp, dest)
|
||||
|
||||
|
||||
def makeStatHTML(net_sta, config):
    """Generate the per-station detail page for *net_sta* ("NET_STA").

    Writes one row per stream of the station with data/feed timestamps
    and latencies taken from the module-global *status* dict, then
    atomically replaces the page via myrename().
    """
    global status

    # make sure the output directory exists; ignore "already exists"
    try: os.makedirs(config['setup']['wwwdir'])
    except: pass

    # write to a temp file first, then rename for an atomic update
    temp = "%s/tmp2.html" % config['setup']['wwwdir']
    dest = "%s/%s.html" % ( config['setup']['wwwdir'], net_sta)

    htmlfile = open(temp, "w")
    htmlfile.write("""<html>
<head>
<title>%s - Station %s</title>
<meta http-equiv='refresh' content='%d'>
<link rel='SHORTCUT ICON' href='%s'>
</head>
<body bgcolor='#ffffff'>
<center><font size='+2'>%s - Station %s</font>\n""" % \
        ( config['setup']['title'], net_sta, int(config['setup']['refresh']),
          config['setup']['icon'],
          config['setup']['title'], net_sta.split("_")[-1]))

    # optional human-readable station description from the station config
    try:
        name = config.station[net_sta]['info']
        htmlfile.write("<br><font size='+1'>%s</font>" % name)
    except: pass
    htmlfile.write("</center>\n")

    if 'text' in config.station[net_sta]:
        htmlfile.write("<P>%s</P>\n" % config.station[net_sta]['text'])

    htmlfile.write("""<p><center>
<table cellpadding='2' cellspacing='1' border='0' bgcolor='#000000'>
<tr>
<th bgcolor='#ffffff' align='center' rowspan='2'>Station/<br>Channel</th>
<th bgcolor='#ffffff' align='center' colspan='2'>Data</th>
<th bgcolor='#ffffff' align='center' colspan='2'>Feed</th>
<th bgcolor='#ffffff' align='center' rowspan='2'>Diff.</th>
</tr>
<tr>
<th bgcolor='#ffffff' align='center'>Last Sample</th>
<th bgcolor='#ffffff' align='center'>Latency</th>
<th bgcolor='#ffffff' align='center'>Last Received</th>
<th bgcolor='#ffffff' align='center'>Latency</th>
</tr>""")

    now = datetime.utcnow()

    # all status entries belonging to this station ("NET.STA" key prefix)
    netsta2=net_sta.replace("_",".")
    streams = [ x for x in list(status.keys()) if x.find(netsta2)==0 ]
    streams.sort()
    for label in streams:
        tim1 = status[label].last_data
        tim2 = status[label].last_feed

        lat1, lat2, lat3 = now-tim1, now-tim2, tim2-tim1
        col1, col2, col3 = getColor(lat1), getColor(lat2), getColor(lat3)
        # identical latencies: show feed/diff as 'n/a'
        if lat1==lat2: lat2 = lat3 = None
        # strip a trailing ".D"/".E" record-type suffix from the key
        if label[-2]=='.' and label[-1] in "DE":
            label = label[:-2]
        n,s,loc,c = label.split(".")
        c = ("%s.%s" % (loc,c)).strip(".")
        htmlfile.write("<tr bgcolor='#ffffff'><td>" \
            "<tt> %s %s </td>%s%s%s%s%s</tr>\n" \
            % (s, c, TDt(tim1, col1), TDf(lat1, col1),
               TDt(tim2, col2), TDf(lat2, col2),
               TDf(lat3, col3)))

    htmlfile.write("</table></p>\n")
    colorLegend(htmlfile)

    htmlfile.write("<p>\nHow to <a href='http://geofon.gfz-potsdam.de/waveform/status/latency.php' target='_blank'>interpret</a> " \
        "these numbers?<br>\n")
    if 'liveurl' in config['setup']:
        # substitute '%s' in live_url by station name
        # NOTE(review): `s` is the loop variable from above — if *streams*
        # is empty this raises NameError; presumably stations always have
        # at least one stream. TODO confirm.
        url = config['setup']['liveurl'] % s
        htmlfile.write("View a <a href='%s' target='_blank'>live seismogram</a> of "
            "station %s</center>\n" % (url, s))
    htmlfile.write("</p>\n")
    pageTrailer(htmlfile, config)
    htmlfile.close()
    myrename(temp, dest)
|
||||
|
||||
def read_ini():
    """(Re)load the global setup and station configuration.

    Exits via usage() with code 2 when either config file is missing.
    """
    global config, ini_setup, ini_stations

    def require(path, what):
        # abort with the usage help when a config file is missing
        if not os.path.isfile(path):
            print("[error] %s config '%s' does not exist" % (what, path),
                  file=sys.stderr)
            usage(exitcode=2)

    print("\nreading setup config from '%s'" % ini_setup)
    require(ini_setup, "setup")
    config = MyConfig(ini_setup)

    print("reading station config from '%s'" % ini_stations)
    require(ini_stations, "station")
    config.station = MyConfig(ini_stations)
|
||||
|
||||
def SIGINT_handler(signum, frame):
    """Signal handler: announce the received signal and exit cleanly."""
    global status
    message = "received signal #%d => will write status file and exit" % signum
    print(message)
    # status.write("status.tab")
    sys.exit(0)
|
||||
|
||||
# ---- command line parsing -------------------------------------------------
try:
    opts, args = getopt(sys.argv[1:], "c:s:t:hv")
except GetoptError:
    print("\nUnknown option in "+str(sys.argv[1:])+" - EXIT.", file=sys.stderr)
    usage(exitcode=2)

for flag, arg in opts:
    if flag == "-c": ini_setup = arg
    if flag == "-s": ini_stations = arg
    if flag == "-t": refresh = float(arg) # XXX not yet used
    if flag == "-h": usage(exitcode=0)
    if flag == "-v": verbose = 1


# Terminate gracefully on the usual termination signals.
signal.signal(signal.SIGHUP, SIGINT_handler)
signal.signal(signal.SIGINT, SIGINT_handler)
signal.signal(signal.SIGQUIT, SIGINT_handler)
signal.signal(signal.SIGTERM, SIGINT_handler)

read_ini()

# Request all channels/locations of every configured station.
cha = "???"
loc = ""

s = config.station
net_sta = ["%s_%s" % (s[k]['net'],s[k]['sta']) for k in s]
s_arg = ','.join(net_sta)
streams = [ (s[k]['net'],s[k]['sta'],loc,cha) for k in s ]


# SeedLink server to monitor; defaults to localhost.
if 'server' in config['setup']:
    server = config['setup']['server']
else: server = "localhost"

#def read_initial(config):
#
# for s in config.station:
# print s,glob.glob("/home/dcop/seedlink/%s/segments/*" % s)
# for f in glob.glob("/home/dcop/seedlink/%s/segments/*" % s):
# print f
#
#read_initial(config)


#print "reading initial time windows from file 'status.tab'"
#status = StatusDict("status.tab")
status = StatusDict()
#if verbose: status.write(sys.stderr)


print("generating output to '%s'" % config['setup']['wwwdir'])

# Seed the status dict with the current stream windows via slinktool.
print("getting initial time windows from SeedLink server '%s'" % server)
status.fromSlinkTool(server, stations=net_sta)
if verbose: status.write(sys.stderr)

nextTimeGenerateHTML = time()

print("setting up connection to SeedLink server '%s'" % server)

# ---- main loop: follow the real-time stream and refresh the pages ---------
# NOTE(review): `input` shadows the builtin of the same name.
input = seiscomp.slclient.Input(server, streams)
for rec in input:
    id = '.'.join([rec.net, rec.sta, rec.loc, rec.cha, rec.rectype])
    # if not id in status: continue # XXX XXX XXX
    try:
        status[id].last_data = rec.end_time
        status[id].last_feed = datetime.utcnow()
    # NOTE(review): bare except silently drops records for unknown stream
    # ids — presumably intentional best-effort behavior.
    except:
        continue

    # Throttle page regeneration to once per configured refresh interval.
    if time() > nextTimeGenerateHTML:
        makeMainHTML(config)
        nextTimeGenerateHTML = time() + int(config['setup']['refresh'])
|
@ -0,0 +1,88 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
from nettab.tab import Tab
|
||||
import seiscomp.io
|
||||
|
||||
def main():
    """Command-line driver: digest tab files and optionally emit inventory XML.

    Returns a process exit code: 0 on success, 1 on error.

    Fix: ``raise e`` inside the digest loop discarded the original
    traceback; a bare ``raise`` preserves it. The check branch now
    returns an explicit 0 instead of an implicit None.
    """
    # Creating the parser
    parser = OptionParser(usage="Tab to Inventory (sc3) converter",
                          version="1.0", add_help_option=True)

    parser.add_option("-i", "--ip", type="string",
                      help="Prefix to be added to each instrument generated.",
                      dest="instrumentPrefix", default=None)

    parser.add_option("-f", "--filterf", type="string",
                      help="Indicates a folder containing the filters coefficients files",
                      dest="ffolder", default=None)

    parser.add_option("-x", "--xmlf", type="string",
                      help="Indicates a folder containing the XML inventory files (needed for station group support)",
                      dest="xfolder", default=None)

    parser.add_option("-D", "--database", type="string",
                      help="Database URL for inventory (needed for station group support)",
                      dest="database", default=None)

    parser.add_option("", "--force", action="store_true",
                      help="Don't stop on error of individual files",
                      dest="force", default=False)

    parser.add_option("-g", "--generate", action="store_true",
                      help="Generate XML file at the end",
                      dest="generate", default=False)

    parser.add_option("-c", "--check", action="store_true",
                      help="Check the loaded files",
                      dest="check", default=False)

    parser.add_option("-d", "--default", type="string",
                      help="Indicates the default file",
                      dest="defaultFile", default=None)

    parser.add_option("-o", "--output", type="string",
                      help="Indicates the output file",
                      dest="outFile", default="-")

    # Parsing & Error check
    (options, args) = parser.parse_args()
    error = False

    if len(args) < 1:
        print("No input file(s) to digest", file=sys.stderr)
        error = True

    if error:
        print("Use -h for help on usage", file=sys.stderr)
        return 1

    # Execution
    try:
        inv = None
        t = Tab(options.instrumentPrefix, options.defaultFile,
                options.ffolder, options.xfolder, options.database)
        for f in args:
            try:
                t.digest(f)
            except Exception as e:
                print("Error digesting %s:\n %s" % (f, e), file=sys.stderr)
                if not options.force:
                    # re-raise preserving the original traceback
                    raise

        if options.check:
            t.check()
            return 0

        if options.generate:
            inv = t.sc3Obj()
            if inv:
                ar = seiscomp.io.XMLArchive()
                print("Generating file: %s" % options.outFile, file=sys.stderr)
                ar.create(options.outFile)
                ar.setFormattedOutput(True)
                ar.setCompression(False)
                ar.writeObject(inv)
                ar.close()
    except Exception as e:
        print("Error: " + str(e), file=sys.stderr)
        return 1
    finally:
        print("Ending.", file=sys.stderr)

    return 0
|
||||
|
||||
# Script entry point: exit with the converter's return code.
if __name__ == "__main__":
    ret = main()
    sys.exit(ret)
|
@ -0,0 +1,526 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from nettab.convertUtils import StationAttributes, NetworkAttributes, StationMappings, parseDate, formatDate, quote, hummanStr
|
||||
from nettab.tab import Tab
|
||||
from optparse import OptionParser
|
||||
from nettab.nodesi import Instruments
|
||||
|
||||
class TabConverter:
|
||||
    def __init__(self, networkCode):
        """Initialize an empty converter bound to *networkCode*."""
        # cached printf-style station line template, built by __fmtline__()
        self.__fmt__ = None
        self.takeSugestions = None

        self.filename = None

        self.networkCode = networkCode
        self.stationList = None

        # attribute/mapping/instrument helpers, set by the load* methods
        self.nat = None
        self.sat = None
        self.sma = None
        self.inst = None
        self.defaultEpoch = parseDate("1980/001")

        # presumably maximum column widths seen so far, consumed by
        # __fmtline__() to align the output — TODO confirm
        self.start=0
        self.code=0
        self.description=0
        self.datalogger=0
        self.sensor=0
        self.channel=0
        self.gaind = 0
        self.longitude=0
        self.latitude=0
        self.elevation=0
        self.end=0
        self.depth=0
        self.orientation=0

        ## default dates
        self.startDate = parseDate("1980/001")
        self.endDate = parseDate(None)
|
||||
|
||||
def loadStationMapping(self, filename):
|
||||
if self.networkCode is None: raise Exception("Cannot load Station mapping without network code")
|
||||
if self.stationList is None: raise Exception("Cannot load Station mapping without station list")
|
||||
|
||||
try:
|
||||
sm = StationMappings(self.networkCode, self.stationList, filename)
|
||||
self.sma = sm
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
||||
def loadStationAttribute(self, filename):
|
||||
if self.networkCode is None: raise Exception("Cannot load Station att without network code")
|
||||
if self.stationList is None: raise Exception("Cannot load Station att without station list")
|
||||
|
||||
try:
|
||||
sa = StationAttributes(self.networkCode, self.stationList, filename)
|
||||
self.sat = sa
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
||||
def loadNetworkAttribute(self, filename):
    """Load a network attribute file into this converter.

    Requires network code and a preloaded station list; raises
    Exception otherwise.  NetworkAttributes errors propagate unchanged.
    """
    if self.networkCode is None:
        raise Exception("Cannot load Network att without network code")
    if self.stationList is None:
        raise Exception("Cannot load Network att without station list")

    # Removed the no-op `try/except Exception: raise e` wrapper.
    self.nat = NetworkAttributes(self.networkCode, filename)
|
||||
|
||||
def loadInstrumentsFile(self, filename, filterFolder):
    """Digest an instrument tab file; keep its instrument set when one was built."""
    parser = Tab(filterFolder=filterFolder)
    parser.digest(filename)
    instruments = parser.i
    if instruments:
        self.inst = instruments
|
||||
|
||||
def __fmtline__(self):
    """Return the %-format template for output station lines.

    Built lazily on first use from the column widths accumulated by
    __analyseLine__, then cached in self.__fmt__.
    """
    if not self.__fmt__:
        widths = [
            self.code, self.description, self.datalogger, self.sensor,
            self.channel, self.orientation, self.latitude, self.longitude,
            self.elevation, self.depth, self.start, self.end,
        ]
        self.__fmt__ = "Sl: " + " ".join("%%-%ds" % w for w in widths)
    return self.__fmt__
|
||||
|
||||
def __analyseLine__(self, items):
    """First-pass analysis of one tab line.

    Updates the column-width accumulators used to align the generated
    output.  Returns [stationCode, start, end] for a station line, or
    [None, None, None] for a network header line.

    Raises Exception on malformed lines.
    """
    inputLine = " ".join(items)
    if len(items) < 4:
        raise Exception("Invalid items count on line %s" % inputLine)

    if len(items) <= 5:
        # Network header line: only validate the network code.
        netCode = items[2]
        if netCode != self.networkCode:
            raise Exception("Tab file (%s) doesn't match class (%s) -- %s" % (netCode, self.networkCode, inputLine))
        return [None, None, None]

    if len(items) < 6:
        raise Exception("Invalid Station line %s" % inputLine)

    stationCode = items.pop(0)
    self.code = max(self.code, len(stationCode))

    self.description = max(self.description, len(quote(hummanStr(items.pop(0)))))

    datalogger = len(items.pop(0))
    self.datalogger = max(self.datalogger, datalogger)

    self.sensor = max(self.sensor, len(items.pop(0)))

    # Gain multiplier: a non-unit value is later folded into the
    # datalogger name ("name%gain"), so reserve extra width for it.
    gaind = items.pop(0)
    if float(gaind) != 1.0:
        self.datalogger = max(self.datalogger, datalogger + len(gaind))

    self.channel = max(self.channel, len(items.pop(0)))
    self.latitude = max(self.latitude, len(items.pop(0)))
    self.longitude = max(self.longitude, len(items.pop(0)))
    self.elevation = max(self.elevation, len(items.pop(0)))

    # Depth field: plain number means standard ZNE orientation; otherwise
    # "depth/azimuth1/azimuth2" describes rotated horizontal components.
    depth = items.pop(0)
    try:
        float(depth)
        orientation = "ZNE"
    except ValueError:
        (depth, a1, a2) = depth.split("/")
        a1n = float(a1)
        a2n = float(a2)
        orientation = "Z1"
        if a1n != 0.0:
            orientation += "(0.0,%s)" % a1
        orientation += "2"
        if a2n != 90.0:
            # BUG FIX: the original formatted a1 here instead of a2.
            orientation += "(0.0,%s)" % a2
    self.orientation = max(self.orientation, len(orientation))
    self.depth = max(self.depth, len(depth))

    # Start date is mandatory.
    try:
        start = parseDate(items.pop(0))
        self.start = max(self.start, len(formatDate(start)))
    except Exception:
        raise Exception("Invalid Station line start date %s" % inputLine)

    # End date is optional; absent or unparsable means "open end".
    try:
        end = parseDate(items.pop(0))
    except Exception:
        end = parseDate("")
    self.end = max(self.end, len(formatDate(end)))

    return [stationCode, start, end]
|
||||
|
||||
def preload(self, filename, takeSugestions):
    """First pass over *filename*: collect station codes, column widths
    and suggested network start/end epochs.  Must run before convert()."""
    self.takeSugestions = takeSugestions
    sugestedStart = datetime.now()
    sugestedEnd = self.defaultEpoch
    stationList = []
    error = []

    # Refuse to pre-load a second file into the same converter.
    if self.filename is not None:
        raise Exception("Cannot pre-load two different files (current one is %s)" % self.filename)

    print("Analysing ... ", file=sys.stderr)
    fd = open(filename)
    for rawline in fd:
        rawline = rawline.strip()
        if not rawline or rawline.startswith("#"):
            continue

        try:
            (stationCode, start, end) = self.__analyseLine__(rawline.split())
        except Exception as e:
            error.append(str(e))
            continue

        # Header lines yield no station code.
        if not stationCode:
            continue
        if stationCode not in stationList:
            stationList.append(stationCode)

        sugestedStart = min(sugestedStart, start)
        # A single open (None) end date makes the suggested end open too.
        sugestedEnd = max(sugestedEnd, end) if (end and sugestedEnd) else None
    fd.close()

    if error:
        raise Exception("\n".join(error))

    print(" Loaded %d different stations" % len(stationList), file=sys.stderr)
    if takeSugestions:
        # NOTE(review): these messages print the previous defaults rather than
        # the value being adopted — presumably intentional; confirm.
        print(" Taking suggestion start date of %s " % formatDate(self.startDate), file=sys.stderr)
        self.startDate = sugestedStart
        print(" Taking suggestion end date of %s " % formatDate(self.endDate), file=sys.stderr)
        self.endDate = sugestedEnd

    self.filename = filename
    self.stationList = stationList
    print("Done.", file=sys.stderr)
|
||||
|
||||
def __convertHeader__(self, line, fdo):
    """Rewrite the network header line as 'Nw: <code> <start> <end>' and
    dump the network attributes below it."""
    fields = line.split()

    # When no suggested epoch was taken, attribute-file dates win.
    if not self.takeSugestions:
        if self.nat.hasStart:
            print(" Using start from attribute.", file=sys.stderr)
            self.startDate = self.nat.startDate
        if self.nat.hasEnd:
            print(" Using end from attribute.", file=sys.stderr)
            self.endDate = self.nat.endDate

    nCode = fields[2].strip()
    if nCode != self.networkCode:
        raise Exception("Wrong network code found: %s != %s" % (self.networkCode, nCode))

    fdo.write("Nw: %s %s %s\n" % (nCode, formatDate(self.startDate), formatDate(self.endDate)))
    self.nat.dump(fdo)
|
||||
|
||||
def __convertLine__(self, line, fdo, atFront):
    """Convert one station line of the old tab format and write the
    resulting 'Sl:' line(s) to *fdo*.

    When *atFront* is false, per-station mappings/attributes are dumped
    just before the station lines.  Returns the station code so the
    caller can track output grouping.  Raises Exception on missing or
    invalid fields.
    """
    lnfmt = self.__fmtline__()
    items = line.split()

    try:
        code = items.pop(0)
    except IndexError:
        raise Exception("Missing Code on %s" % line)

    if code not in self.stationList:
        # BUG FIX: original used "$s", which is not a %-placeholder and
        # raised TypeError when formatting this message.
        raise Exception("Unknown station code %s" % code)

    try:
        hummanStr(items.pop(0))
    except Exception:
        # BUG FIX: original reported "Missing Gain" for this field.
        raise Exception("Missing Description on %s" % line)

    try:
        datalogger = items.pop(0)
    except IndexError:
        raise Exception("Missing Datalogger on %s" % line)

    try:
        sensor = items.pop(0)
    except IndexError:
        raise Exception("Missing Sensor on %s" % line)

    try:
        gaind = items.pop(0)
        if float(gaind) != 1.0:
            # A non-unit gain multiplier is folded into the datalogger name
            # as "name%<real gain>", which requires the instrument database.
            if not self.inst:
                raise Exception("Instrument database needed to convert gain")
            try:
                dte = self.inst.dls[str(datalogger).split("%")[0]]
            except Exception as e:
                print(e, file=sys.stderr)
                raise Exception("Datalogger %s not found" % str(datalogger).split("%")[0])
            datalogger += "%%%s" % (float(dte.gain) * float(gaind))
            print(" Converting gain multiplier to real gain using instrument DB on %s" % code, file=sys.stderr)
    except Exception as e:
        raise Exception("Missing Gain on %s (%s)" % (line, str(e)))

    try:
        channel = items.pop(0)
    except IndexError:
        raise Exception("Missing Channel on %s" % line)

    try:
        latitude = items.pop(0)
    except IndexError:
        raise Exception("Missing Latitude on %s" % line)

    try:
        longitude = items.pop(0)
    except IndexError:
        raise Exception("Missing Longitude on %s" % line)

    try:
        elevation = items.pop(0)
    except IndexError:
        raise Exception("Missing Elevation on %s" % line)

    try:
        depth = items.pop(0)
    except IndexError:
        raise Exception("Missing Depth on %s" % line)

    # Orientation: plain numeric depth means standard ZNE; otherwise the
    # field is "depth/azimuth1/azimuth2" for rotated horizontal components.
    try:
        float(depth)
        orientation = "ZNE"
    except ValueError:
        (depth, a1, a2) = depth.split("/")
        orientation = "Z"
        orientation += "1" if float(a1) == 0.0 else "1(0.0,%s)" % a1
        orientation += "2" if float(a2) == 90.0 else "2(0.0,%s)" % a2

    # Start date is mandatory.
    try:
        start = items.pop(0)
    except IndexError:
        raise Exception("Missing Start on %s" % line)
    try:
        start = parseDate(start)
    except Exception as e:
        raise Exception("Invalid Start date: %s (%s) on %s" % (start, e, line))

    # End date is optional ("" parses as open end).
    try:
        end = items.pop(0)
    except IndexError:
        end = ""
    try:
        end = parseDate(end)
    except Exception as e:
        raise Exception("Invalid End date: %s (%s) on %s" % (end, e, line))

    (place, country) = self.sat.parseStationLine(line.split())
    description = "%s/%s" % (place, country)

    # When attributes are not grouped at the network front, dump the
    # per-station mappings/attributes right before the station lines.
    if not atFront:
        self.sma.dump(fdo, code)
        self.sat.dump(fdo, code)

    # A station may be split into several epochs by the mapping rules.
    for (mstart, mend) in self.sma.getMappings(code, start, end):
        fdo.write(lnfmt % (code, quote(description), datalogger, sensor, channel,
                           orientation, latitude, longitude, elevation, depth,
                           formatDate(mstart), formatDate(mend)) + "\n")

    return code
|
||||
|
||||
def convert(self, fdo, keepcomments = False, atFront = True):
    # Second pass: re-read the pre-loaded tab file and write the converted
    # "new tab" representation to fdo.  preload() must have run first.
    if self.filename is None:
        raise Exception("You should pre-load a tab file before before converting.")

    ## Obtain additional attribute classes if needed
    if not self.nat:
        self.nat = NetworkAttributes(self.networkCode, None)
    if not self.sat:
        self.sat = StationAttributes(self.networkCode, self.stationList, None)
    if not self.sma:
        self.sma = StationMappings(self.networkCode, self.stationList, None)

    # Parse in again the station lines and network header by the additional classes
    print("Pre-Parsing Station/Network lines ... ", file=sys.stderr)
    fd = open(self.filename)
    for line in fd:
        line = line.strip()
        if not line or line[0] == "#":
            continue
        items = line.split()
        if len(items) <= 5:
            # <= 5 fields: network header line
            self.nat.parseNetworkLine(items)
        elif len(items) <= 12:
            # 6..12 fields: station line
            self.sma.parseStationLine(items)
            self.sat.parseStationLine(items)
    fd.close()

    fd = open(self.filename)
    oldcode="" # Station code of the last printed line
    last="" # Type of the last printed line
    print("Converting ... ", file=sys.stderr)
    for line in fd:
        line = line.strip()
        if not line or line[0] == "#":
            # Blank/comment line: close any open group ('l'ine, 'a'ttributes,
            # 'h'eader) with a blank separator line.
            if last == "l" or last == "a" or last == "h": fdo.write("\n")
            if keepcomments: fdo.write(line + "\n")
            last = "c"
            continue
        items = line.split()
        if len(items) <= 5:
            self.__convertHeader__(line, fdo)
            last = "h"
            if (atFront):
                # Dump condensed attribute blocks right below the header.
                fdo.write("\n")
                self.sma.dump(fdo, None)
                self.sat.dump(fdo, None)
                last = "a"
                fdo.write("\n")
        elif len(items) <= 12:
            # Separate different stations (or follow the header) with a blank line.
            if (last == "l" and items[0].strip() != oldcode) or last == "h": fdo.write("\n")
            oldcode = self.__convertLine__(line, fdo, atFront)
            last = "l"
            pass
        else:
            # Line has too many fields for any known record type.
            print("input at %s" % line, file=sys.stderr)
    fd.close()
|
||||
|
||||
def main():
    """Command line entry point: parse options, pre-load the tab file,
    then convert it to the new tab format on the output file or stdout."""
    parser = OptionParser(usage="Old tab to New tab converter", version="1.0", add_help_option=True)

    parser.add_option("", "--instdb", type="string",
                      help="Indicates the instrument databases file to use", dest="inst", default=None)
    # BUG FIX: --smap/--nat help strings were copy-pasted from --sat.
    parser.add_option("", "--smap", type="string",
                      help="Indicates the station mapping file to use", dest="smap", default=None)
    parser.add_option("", "--sat", type="string",
                      help="Indicates the station attribute file to use", dest="sat", default=None)
    parser.add_option("", "--nat", type="string",
                      help="Indicates the network attribute file to use", dest="nat", default=None)
    parser.add_option("-t", "--tab", type="string",
                      help="Indicates the tab file to convert", dest="tabFile", default=None)
    parser.add_option("-f", "--filterf", type="string",
                      help="Indicates a folder containing the filters coefficients files", dest="ffolder", default=None)
    parser.add_option("-n", "--net", type="string",
                      help="Indicates a two letter network code", dest="netCode", default=None)
    parser.add_option("-g", "--globalsa", action="store_true",
                      help="Indicate that we should put a condensed version of the station attributes just below the network definition", dest="globalSa", default=False)
    parser.add_option("-a", "--autotime", action="store_true",
                      help="Guess the start and end times for a network from the channel times", dest="autoTime", default=False)
    parser.add_option("-c", "--clean", action="store_true",
                      help="Remove the comments and blank lines", dest="cleanFile", default=False)

    # Parsing & Error check
    (options, args) = parser.parse_args()
    error = False

    if len(args) != 1:
        print("need an Output Filename or '-' for stdout", file=sys.stderr)
        error = True

    if not options.tabFile:
        print("tab file name not supplied", file=sys.stderr)
        error = True

    if options.inst and not options.ffolder:
        print("Filter folder not supplied.", file=sys.stderr)
        error = True

    if options.tabFile and not os.path.isfile(options.tabFile):
        print("supplied tab file (%s) is not a file" % options.tabFile, file=sys.stderr)
        error = True

    if not options.netCode:
        print("network code not supplied", file=sys.stderr)
        error = True

    if error:
        print("use -h for getting a help on usage", file=sys.stderr)
        return

    fdo = open(args[0], "w") if args[0] != "-" else sys.stdout

    # Execution
    try:
        cnv = TabConverter(options.netCode.upper())
        cnv.preload(options.tabFile, options.autoTime)

        if options.inst or options.smap or options.nat or options.sat:
            print("Loading optional files: ", file=sys.stderr)

            if options.inst and os.path.isfile(options.inst):
                cnv.loadInstrumentsFile(options.inst, options.ffolder)
            if options.smap and os.path.isfile(options.smap):
                cnv.loadStationMapping(options.smap)
            if options.nat and os.path.isfile(options.nat):
                cnv.loadNetworkAttribute(options.nat)
            if options.sat and os.path.isfile(options.sat):
                cnv.loadStationAttribute(options.sat)
            print("Done.", file=sys.stderr)

        cnv.convert(fdo, not options.cleanFile, options.globalSa)
    except Exception as e:
        print("", file=sys.stderr)
        print("Error on processing: %s" % e, file=sys.stderr)
    finally:
        # BUG FIX: do not close sys.stdout when writing to "-".
        if fdo is not sys.stdout:
            fdo.close()
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    main()
|
@ -0,0 +1,380 @@
|
||||
#!/usr/bin/env seiscomp-python
|
||||
|
||||
################################################################################
|
||||
# Copyright (C) 2012-2013, 2020 Helmholtz-Zentrum Potsdam - Deutsches GeoForschungsZentrum GFZ
|
||||
#
|
||||
# tabinvmodifier -- Tool for inventory modification using nettab files.
|
||||
#
|
||||
# This software is free software and comes with ABSOLUTELY NO WARRANTY.
|
||||
#
|
||||
# Author: Marcelo Bianchi
|
||||
# Email: mbianchi@gfz-potsdam.de
|
||||
################################################################################
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
import datetime, time
|
||||
from nettab.lineType import Nw, Sa, Na, Ia
|
||||
from nettab.basesc3 import sc3
|
||||
import seiscomp.datamodel, seiscomp.io, seiscomp.client, seiscomp.core, seiscomp.logging
|
||||
|
||||
class Rules(object):
    """Collects nettab attribute lines (Nw/Na/Sa/Ia) keyed by network epoch
    and answers attribute lookups for inventory elements."""

    def __init__(self, relaxed=False):
        # relaxed loosens NSLC matching in getStationAttributes()
        self.relaxed = relaxed
        # (code, start, end) -> {"Sa": [...], "Na": [...]}
        self.attributes = {}
        # Instrument attribute lines (Ia), matched by element id/type
        self.iattributes = []

    @staticmethod
    def _overlaps(pstart, pend, cstart, cend):
        """True when epoch [pstart, pend) intersects [cstart, cend);
        a None end means an open epoch."""
        if pend and pend <= cstart:
            return False
        return not cend or pstart < cend

    def Nw(self, nw):
        """Register a network epoch line; returns its lookup key."""
        key = (nw.code, nw.start, nw.end)
        if key in self.attributes:
            raise Exception("Nw (%s/%s-%s) is already defined." % key)
        self.attributes[key] = {"Sa": [], "Na": []}
        return key

    def Sa(self, key, sa):
        """Attach a station attribute line to the network epoch *key*."""
        if key not in self.attributes:
            raise Exception("Nw %s/%s-%s not found in Ruleset" % key)
        self.attributes[key]["Sa"].append(sa)

    def Na(self, key, na):
        """Attach a network attribute line to the network epoch *key*."""
        if key not in self.attributes:
            raise Exception("Nw %s/%s-%s not found in Ruleset" % key)
        self.attributes[key]["Na"].append(na)

    def Ia(self, ia):
        """Register an instrument attribute line."""
        self.iattributes.append(ia)

    def findKey(self, ncode, nstart, nend):
        """Return the registered epoch key matching code + time span, or None."""
        for candidate in self.attributes:
            (code, start, end) = candidate
            if code == ncode and self._overlaps(start, end, nstart, nend):
                return candidate
        return None

    def getInstrumentsAttributes(self, elementId, elementType):
        """Attribute dict for an instrument element (sensor/datalogger)."""
        return {item.Key: item.Value
                for item in self.iattributes
                if item.match(elementId, elementType)}

    def getNetworkAttributes(self, key):
        """Attribute dict collected for network epoch *key*."""
        return {item.Key: item.Value for item in self.attributes[key]["Na"]}

    def getStationAttributes(self, key, ncode, scode, lcode, ccode, start, end):
        """Attribute dict for a station/location/channel under epoch *key*."""
        return {item.Key: item.Value
                for item in self.attributes[key]["Sa"]
                if item.match(scode, lcode, ccode, start, end, self.relaxed)}
|
||||
|
||||
class InventoryModifier(seiscomp.client.Application):
|
||||
def __init__(self, argc, argv):
    """Set up the SeisComP client application with a fixed messaging username."""
    seiscomp.client.Application.__init__(self, argc, argv)
    self.setMessagingUsername("iModify")

    # Filled in by validateParameters()
    self.rules = None
    self.relaxed = False
    self.outputFile = None
|
||||
|
||||
def _digest(self, tabFilename, rules=None):
    """Parse a nettab rules file into a Rules object.

    Only attribute line types (Nw/Na/Sa/Ia) are accepted; instrument and
    station definition lines raise "Type not supported.".  Returns the
    (possibly newly created) Rules instance.
    """
    if not tabFilename or not os.path.isfile(tabFilename):
        raise Exception("Supplied filename is invalid.")

    if not rules:
        rules = Rules(self.relaxed)

    # Line types defined by the nettab format but not usable as rules here.
    unsupported = ("Sg", "Sr", "Se", "Dl", "Cl", "Ff", "If", "Pz")

    key = None
    # BUG FIX: the original referenced fd in a finally block even when
    # open() itself failed (UnboundLocalError); 'with' cleans up correctly.
    with open(tabFilename) as fd:
        for line in fd:
            line = line.strip()
            if not line or line[0] == "#":
                continue
            if ":" not in line:
                raise Exception("Invalid line format '%s'" % line)
            (Type, Content) = line.split(":", 1)

            if Type == "Nw":
                key = rules.Nw(Nw(Content))
            elif Type == "Na":
                # BUG FIX: original raised NameError when Na/Sa preceded Nw.
                if key is None:
                    raise Exception("Na line found before any Nw line")
                rules.Na(key, Na(Content))
            elif Type == "Sa":
                if key is None:
                    raise Exception("Sa line found before any Nw line")
                rules.Sa(key, Sa(Content))
            elif Type == "Ia":
                rules.Ia(Ia(Content))
            elif Type in unsupported:
                raise Exception("Type not supported.")
            # Unknown line types are silently ignored, as before.

    return rules
|
||||
|
||||
def validateParameters(self):
    """Validate command line options and digest the rules file.

    Returns False (aborting startup) on any usage error."""
    cmd = self.commandline()
    rulesFile = cmd.optionString("rules") if cmd.hasOption("rules") else None
    outputFile = cmd.optionString("output") if cmd.hasOption("output") else None

    if cmd.hasOption("relaxed"):
        self.relaxed = True

    # File-based inventory cannot produce messaging notifiers.
    if cmd.hasOption("inventory-db") and outputFile is None:
        print("Cannot send notifiers when loading inventory from file.", file=sys.stderr)
        return False

    unrecognized = cmd.unrecognizedOptions()
    if unrecognized:
        print("Invalid options: ", end=' ', file=sys.stderr)
        for opt in unrecognized:
            print(opt, end=' ', file=sys.stderr)
        print("", file=sys.stderr)
        return False

    if not rulesFile:
        print("No rule file was supplied for processing", file=sys.stderr)
        return False

    if not os.path.isfile(rulesFile):
        argv0 = os.path.basename(self.arguments()[0])
        print("%s: %s: No such file or directory" % (argv0, rulesFile), file=sys.stderr)
        return False

    if cmd.hasOption("inventory-db"):
        # Inventory comes from a file: run standalone without DB/messaging.
        self.setDatabaseEnabled(False, False)
        self.setMessagingEnabled(False)

    self.rules = self._digest(rulesFile, self.rules)
    self.outputFile = outputFile
    return True
|
||||
|
||||
def createCommandLineDescription(self):
    """Register the 'Rules' and 'Dump' command line option groups."""
    seiscomp.client.Application.createCommandLineDescription(self)

    self.commandline().addGroup("Rules")
    # BUG FIX: --rules takes a nettab rules file, not an XML file.
    self.commandline().addStringOption("Rules", "rules,r", "Input rules filename")
    self.commandline().addOption("Rules", "relaxed,e", "Relax rules for matching NSLC items")

    self.commandline().addGroup("Dump")
    self.commandline().addStringOption("Dump", "output,o", "Output XML filename")
|
||||
|
||||
def initConfiguration(self):
    """Force stderr logging and enable database, messaging and inventory loading."""
    ok = seiscomp.client.Application.initConfiguration(self)
    self.setLoggingToStdErr(True)
    self.setDatabaseEnabled(True, True)
    self.setMessagingEnabled(True)
    self.setLoadInventoryEnabled(True)
    return ok
|
||||
|
||||
def send(self, *args):
    """Send a message, retrying once per second until the connection accepts it."""
    while True:
        if self.connection().send(*args):
            return
        seiscomp.logging.warning("send failed, retrying")
        time.sleep(1)
|
||||
|
||||
def send_notifiers(self, group):
    # Ship all queued inventory notifiers to messaging group *group* in
    # batches of at most `maxmsg` notifiers per message.  Returns the number
    # of notifiers attached to the last (partial) batch.
    Nsize = seiscomp.datamodel.Notifier.Size()

    if Nsize > 0:
        seiscomp.logging.info("trying to apply %d change%s" % (Nsize,"s" if Nsize != 1 else "" ))
    else:
        seiscomp.logging.info("no changes to apply")
        return 0

    Nmsg = seiscomp.datamodel.Notifier.GetMessage(True)
    it = Nmsg.iter()
    msg = seiscomp.datamodel.NotifierMessage()

    maxmsg = 100   # batch size per NotifierMessage
    sent = 0       # notifiers already sent
    mcount = 0     # notifiers attached to the current batch

    try:
        try:
            while it.get():
                msg.attach(seiscomp.datamodel.Notifier_Cast(it.get()))
                mcount += 1
                # Batch full: send and start a new one.
                if msg and mcount == maxmsg:
                    sent += mcount
                    seiscomp.logging.debug("sending message (%5.1f %%)" % (sent / float(Nsize) * 100.0))
                    self.send(group, msg)
                    msg.clear()
                    mcount = 0
                next(it)
        except:
            # NOTE(review): the bare except appears to rely on iterator
            # exhaustion raising to end the loop, but it also hides real
            # errors — confirm before narrowing.
            pass
    finally:
        # Flush the final partial batch, if any.
        if msg.size():
            seiscomp.logging.debug("sending message (%5.1f %%)" % 100.0)
            self.send(group, msg)
            msg.clear()
        seiscomp.logging.info("done")
    return mcount
|
||||
|
||||
@staticmethod
def _loop(obj, count):
    """Materialize obj(0) .. obj(count-1) as a list.

    Helper for iterating SC3 child-object accessors (network, station, ...)."""
    return list(map(obj, range(count)))
|
||||
|
||||
@staticmethod
def _collect(obj):
    """Return (code, start, end) of an SC3 object as Python datetimes.

    end is None when the object has no end time set (the accessor raises)."""
    fmt = "%Y %m %d %H %M %S"
    code = obj.code()
    start = datetime.datetime.strptime(obj.start().toString(fmt), fmt)
    try:
        end = datetime.datetime.strptime(obj.end().toString(fmt), fmt)
    except:  # accessing an unset end raises inside the SC3 bindings
        end = None
    return (code, start, end)
|
||||
|
||||
@staticmethod
def _modifyInventory(mode, obj, att):
    """Apply attribute dict *att* to inventory object *obj*.

    *mode* selects the set of valid attributes (network/station/...).
    'Comment' and 'Pid' entries are added as seiscomp Comment children;
    every other key is validated and applied through the object's
    set<Key>() method.  Unknown keys only print a warning.
    """
    valid = sc3._findValidOnes(mode)
    if not att:
        return

    # Why repeat the code in basesc3.py (sc3::_fillSc3())?
    # What about if there are existing comments/pids - won't
    # this code get the count wrong?? *FIXME*
    commentNum = 0
    for (k, p) in att.items():
        try:
            if k == 'Comment':
                if p.startswith('Grant'):
                    # 2020: These belong in DOI metadata, not here.
                    continue
                c = seiscomp.datamodel.Comment()
                c.setText(p)
                c.setId(str(commentNum))
                commentNum += 1
                obj.add(c)
                continue

            if k == 'Pid':
                print('DEBUG: Adding Pid as comment', p)
                c = seiscomp.datamodel.Comment()
                (typ, val) = p.split(':', 1)
                s = '{"type":"%s", "value":"%s"}' % (typ.upper(), val)
                c.setText(s)
                c.setId('FDSNXML:Identifier/' + str(commentNum))
                commentNum += 1
                obj.add(c)
                continue

            p = valid['attributes'][k]['validator'](p)
            getattr(obj, 'set' + k)(p)
        except KeyError:
            import string
            hint = ''
            # BUG FIX: string.lowercase is Python 2 only and raises
            # AttributeError on Python 3; use string.ascii_lowercase.
            if k[0] in string.ascii_lowercase:
                hint = " (try '%s' instead)" % (k[0].upper() + k[1:])
            print('Modifying %s: \'%s\' is not a valid key%s' % (mode, k, hint), file=sys.stderr)
    obj.update()
    return
|
||||
|
||||
def run(self):
    # Main work: walk the loaded inventory tree (network -> station ->
    # location -> channel, then sensors/dataloggers) and apply matching
    # rule attributes at every level.
    rules = self.rules
    iv = seiscomp.client.Inventory.Instance().inventory()

    if not rules:
        return False

    if not iv:
        return False

    seiscomp.logging.debug("Loaded %d networks" % iv.networkCount())
    if self.outputFile is None:
        # No output file requested: record changes as notifiers so done()
        # can send them over messaging.
        seiscomp.datamodel.Notifier.Enable()
        self.setInterpretNotifierEnabled(True)

    for net in self._loop(iv.network, iv.networkCount()):
        (ncode, nstart, nend) = self._collect(net)
        # Skip networks with no matching rule epoch.
        key = rules.findKey(ncode, nstart, nend)
        if not key: continue
        att = rules.getNetworkAttributes(key)
        self._modifyInventory("network", net, att)
        seiscomp.logging.info("%s %s" % (ncode, att))
        for sta in self._loop(net.station, net.stationCount()):
            (scode, sstart, send) = self._collect(sta)
            att = rules.getStationAttributes(key, ncode, scode, None, None, sstart, send)
            self._modifyInventory("station", sta, att)
            if att: seiscomp.logging.info(" %s %s" % (scode, att))
            for loc in self._loop(sta.sensorLocation, sta.sensorLocationCount()):
                (lcode, lstart, lend) = self._collect(loc)
                att = rules.getStationAttributes(key, ncode, scode, lcode, None, lstart, lend)
                self._modifyInventory("location", loc, att)
                if att: seiscomp.logging.info(" %s %s" % (lcode, att))
                for cha in self._loop(loc.stream, loc.streamCount()):
                    (ccode, cstart, cend) = self._collect(cha)
                    att = rules.getStationAttributes(key, ncode, scode, lcode, ccode, cstart, cend)
                    self._modifyInventory("channel", cha, att)
                    if att: seiscomp.logging.info(" %s %s" % (ccode, att))

    # Instrument-level (Ia) attributes.
    for sensor in self._loop(iv.sensor, iv.sensorCount()):
        att = rules.getInstrumentsAttributes(sensor.name(), "Se")
        self._modifyInventory("sensor", sensor, att)

    for datalogger in self._loop(iv.datalogger, iv.dataloggerCount()):
        att = rules.getInstrumentsAttributes(datalogger.name(), "Dl")
        self._modifyInventory("datalogger", datalogger, att)

    return True
|
||||
|
||||
def done(self):
    """On shutdown either dump the (possibly modified) inventory as XML or
    flush the pending notifiers to the INVENTORY messaging group."""
    if self.outputFile:
        archive = seiscomp.io.XMLArchive()
        archive.create(self.outputFile)
        archive.setFormattedOutput(True)
        archive.writeObject(seiscomp.client.Inventory.Instance().inventory())
        archive.close()
    else:
        self.send_notifiers("INVENTORY")
    seiscomp.client.Application.done(self)
|
||||
|
||||
# Script entry point: run the SeisComP client application and exit with
# its return status.
if __name__ == "__main__":
    app = InventoryModifier(len(sys.argv), sys.argv)
    sys.exit(app())
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,17 @@
|
||||
# Defines a list of modules loaded at startup.
|
||||
plugins = ${plugins}, fdsnxml
|
||||
|
||||
# SeisComP applications access waveform data through the RecordStream
|
||||
# interface. Please consult the SeisComP documentation for a list of supported
|
||||
# services and their configuration.
|
||||
# This parameter configures the RecordStream URL, format:
|
||||
# [service://]location[#type]. "service" is the name of the recordstream
|
||||
# implementation. If "service" is not given "file://" is implied.
|
||||
recordstream = sdsarchive://@ROOTDIR@/var/lib/archive
|
||||
|
||||
|
||||
# Set the number of bytes to buffer for each chunk of waveform data served
|
||||
# to the client. The lower the buffer the higher the overhead of Python Twisted.
|
||||
# The higher the buffer the higher the memory usage per request. 100kB seems
|
||||
# to be a good trade-off.
|
||||
recordBulkSize = 102400
|
@ -0,0 +1,5 @@
|
||||
# UDP port for receiving GDRT messages. By default port 9999 will be used.
|
||||
plugins.gdrt.udpport = 9999
|
||||
|
||||
# Location of station list file.
|
||||
plugins.gdrt.stationsFrom = stations.txt
|
@ -0,0 +1,93 @@
|
||||
# Default plugins to load. Application specific configuration
|
||||
# files should use the 'plugins' entry to specify additional
|
||||
# plugins otherwise when using 'core.plugins' also these
|
||||
# default values are going to be overwritten.
|
||||
#
|
||||
# To be able to read from all supported databases all available
|
||||
# database plugins are loaded as 'core'.
|
||||
# All currently supported db backends: dbmysql, dbpostgresql, dbsqlite3
|
||||
core.plugins = dbmysql
|
||||
|
||||
# Use log level 2 (error and warning)
|
||||
logging {
|
||||
|
||||
level = 2
|
||||
|
||||
# Use logfiles. It is commented by default to allow applications to define
|
||||
# console output with their hard coded defaults. If this setting is enabled
|
||||
# it would otherwise always override the applications default logging
|
||||
# backend.
|
||||
#file = true
|
||||
|
||||
# Rotate the logfiles
|
||||
file {
|
||||
rotator = true
|
||||
|
||||
# Rotate each 86400 seconds (1 day)
|
||||
rotator.timeSpan = 86400
|
||||
|
||||
# Keep 7 rotated log files
|
||||
rotator.archiveSize = 7
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
# Server connection
|
||||
connection.server = localhost/production
|
||||
|
||||
# The connection timeout
|
||||
connection.timeout = 3
|
||||
|
||||
# How to transfer messages (binary, xml)?
|
||||
connection.encoding = binary
|
||||
|
||||
# Use slink (seedlink) as record source service.
|
||||
recordstream = slink://localhost:18000
|
||||
|
||||
# The agencyID to use when tagging processing results
|
||||
agencyID = GFZ
|
||||
|
||||
# Organization name used mainly by ArcLink and SeedLink.
|
||||
organization = Unset
|
||||
|
||||
# Configures the default filters selectable in manual picker.
|
||||
# The entry with a leading "@" is selected as default filter.
|
||||
picker.filters = \
|
||||
"BP 0.1 - 1 Hz 3rd order;RMHP(10)>>ITAPER(30)>>BW(3,0.1,1)", \
|
||||
"BP 0.1 - 2 Hz 3rd order;RMHP(10)>>ITAPER(30)>>BW(3,0.1,2)", \
|
||||
"BP 0.4 - 1 Hz 3rd order;RMHP(10)>>ITAPER(30)>>BW(3,0.4,1)", \
|
||||
"@BP 0.7 - 2 Hz 3rd order;RMHP(10)>>ITAPER(30)>>BW(3,0.7,2)", \
|
||||
"BP 1 - 3 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,1.0,3)", \
|
||||
"BP 1 - 5 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,1.0,5)", \
|
||||
"BP 2 - 4 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,2.0,4)", \
|
||||
"BP 3 - 6 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,3.0,6)", \
|
||||
"BP 4 - 8 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,4.0,8)", \
|
||||
"HP 3 Hz 3rd order;RMHP(1)>>ITAPER(2)>>BW_HP(3,3)", \
|
||||
"BP 0.7 - 2 Hz + STA/LTA(1,50);RMHP(10)->ITAPER(30)->BW(3,0.7,2)->STALTA(1,50)"
|
||||
|
||||
# Configure the columns of the event list that are visible initially.
|
||||
# The first column containing the origin time is always visible and cannot
|
||||
# be hidden.
|
||||
# Possible values are:
|
||||
# * Type
|
||||
# * M
|
||||
# * MType
|
||||
# * Phases
|
||||
# * Lat
|
||||
# * Lon
|
||||
# * Depth
|
||||
# * Stat
|
||||
# * Agency
|
||||
# * Region
|
||||
# * ID
|
||||
eventlist.visibleColumns = M, MType, Phases, RMS, Lat, Lon, Depth, Stat, Agency, Region, ID
|
||||
|
||||
# Default travel time table configuration. Plugins can be added to for custom
|
||||
# travel time table implementations.
|
||||
# This configuration can be used by applications that need to know which
|
||||
# interfaces are activated and which tables they define.
|
||||
ttt {
|
||||
libtau.tables = iasp91, ak135
|
||||
LOCSAT.tables = iasp91, tab
|
||||
homogeneous.tables = ""
|
||||
}
|
@ -0,0 +1,22 @@
|
||||
# Send journals and event specific updates to the EVENT group.
|
||||
connection.primaryGroup = EVENT
|
||||
|
||||
# Receive objects from EVENT group. This is necessary to wait for event
|
||||
# association of imported origins.
|
||||
connection.subscriptions = EVENT
|
||||
|
||||
# Number of seconds to fetch missed updates on start up.
|
||||
backLog = 1800
|
||||
|
||||
# Number of public objects to cache.
|
||||
cacheSize = 5000
|
||||
|
||||
# Maximum number of notifiers to batch in one message. If set to 0 no size
|
||||
# limit is enforced. Make sure to not hit the overall message size limit of
|
||||
# 16MiB which is enforced by the messaging system.
|
||||
batchSize = 2000
|
||||
|
||||
# If event synchronisation is enabled and an incoming origin is not yet
|
||||
# associated with an event on the target machine then this timeout defines
|
||||
# the maximum number of seconds to wait for an association.
|
||||
eventAssociationTimeout = 10
|
@ -0,0 +1,3 @@
|
||||
# Defines a list of message groups to subscribe to. The default is usually
|
||||
# given by the application and does not need to be changed.
|
||||
connection.subscriptions = EVENT, LOCATION, MAGNITUDE
|
@ -0,0 +1,18 @@
|
||||
# Send to the AMPLITUDE group
|
||||
connection.primaryGroup = AMPLITUDE
|
||||
|
||||
# Receive objects from PICK, AMPLITUDE and LOCATION group
|
||||
connection.subscriptions = PICK, AMPLITUDE, LOCATION
|
||||
|
||||
# The amplitudes to compute triggered by an incoming Origin
|
||||
amplitudes = MLv, mb, mB, Mwp
|
||||
|
||||
# The minimum arrival weight within an origin to compute amplitudes
|
||||
# for the associated pick.
|
||||
amptool.minimumPickWeight = 0.5
|
||||
|
||||
# Timeout in seconds of the first data packet of waveform data acquisition.
|
||||
amptool.initialAcquisitionTimeout = 30
|
||||
|
||||
# Timeout in seconds of any subsequent data packet of waveform data acquisition.
|
||||
amptool.runningAcquisitionTimeout = 2
|
@ -0,0 +1,6 @@
|
||||
archive = @ROOTDIR@/var/lib/archive
|
||||
batchSize = 100
|
||||
threads = 1
|
||||
jitter = 0.5
|
||||
deepScan = false
|
||||
|
@ -0,0 +1,84 @@
|
||||
## Send to the LOCATION group
|
||||
connection.primaryGroup = LOCATION
|
||||
|
||||
## Receive objects from PICK and AMPLITUDE groups
|
||||
connection.subscriptions = PICK, AMPLITUDE
|
||||
|
||||
## max. permissible RMS for a location to be reported
|
||||
#autoloc.maxRMS = 3.5
|
||||
|
||||
## max. individual residual (unweighted) for a pick to
|
||||
## be used in location
|
||||
#autoloc.maxResidual = 7.0
|
||||
|
||||
## Max. secondary azimuth gap for an origin to be reported by.
|
||||
## Default is 360 degrees, i.e. no restriction based on this parameter.
|
||||
#autoloc.maxSGAP = 360
|
||||
|
||||
## Arrivals with exceptionally large amplitudes may be
|
||||
## flagged as XXL, allowing (in future) faster, preliminary
|
||||
## "heads-up" alerts.
|
||||
#autoloc.thresholdXXL = 10000.
|
||||
|
||||
#autoloc.maxStationDistance = 180
|
||||
#autoloc.maxDistanceXXL = 10
|
||||
#autoloc.minPhaseCount = 6
|
||||
#autoloc.minPhaseCountXXL = 4
|
||||
|
||||
## If the station count for stations at < 105 degrees
|
||||
## distance exceeds this number, no picks at > 105 degrees will be
|
||||
## used in location. They will be loosely associated, though.
|
||||
#autoloc.minStaCountIgnorePKP = 30
|
||||
|
||||
## Clean-up interval for removing old/unused objects, in seconds
|
||||
## Don't change.
|
||||
#autoloc.cleanupInterval = 3600
|
||||
|
||||
## max. age for objects kept in memory, in seconds
|
||||
## Default is 6 hours - don't change.
|
||||
#autoloc.maxAge = 21600
|
||||
|
||||
## Don't change.
|
||||
#autoloc.wakeupInterval = 5
|
||||
|
||||
## Grid configuration
|
||||
#autoloc.grid = @DATADIR@/scautoloc/grid.conf
|
||||
|
||||
## Station configuration
|
||||
#autoloc.stationConfig = @DATADIR@/scautoloc/station.conf
|
||||
|
||||
## This is only relevant in offline/testing mode
|
||||
#locator.stationLocations = @DATADIR@/scautoloc/station-locations.conf
|
||||
|
||||
## Manual picks/origins can be fed back into autoloc for two purposes:
|
||||
## * passive association to a solution from a "trusted" source so that we
|
||||
## avoid fake or wrong locations due to events outside our area of interest
|
||||
## * use the manual origins in further processing, especially the manual picks.
|
||||
## Possibly also honor an operator specified fixed depth.
|
||||
## Currently we only permit use of manual picks which are then used
|
||||
## instead of the corresponding automatic picks (if existing)
|
||||
# autoloc.useManualPicks = false
|
||||
|
||||
|
||||
## Log all picks received by scautoloc to this file
|
||||
autoloc.pickLog = @LOGDIR@/autoloc-picklog
|
||||
|
||||
# Amplitude type to be used as SNR amplitude
|
||||
# Don't change unless you know exactly what you are doing.
|
||||
autoloc.amplTypeSNR = snr
|
||||
|
||||
# Amplitude type to be used as absolute amplitude
|
||||
# Don't change unless you know exactly what you are doing.
|
||||
autoloc.amplTypeAbs = mb
|
||||
|
||||
# Use manual origins from our own agency. Essentially it means to
|
||||
# use manual picks from manual origins, which is assumed to be
|
||||
# better than using only automatic picks.
|
||||
autoloc.useManualOrigins = false
|
||||
# NOTE: If you set the above to true, then make sure to add the
|
||||
# LOCATION group to connection.subscriptions!
|
||||
|
||||
# If autoloc.useManualOrigins is true, adopt the depth from manual
|
||||
# origins, which is especially important if it was fixed by the analyst.
|
||||
autoloc.adoptManualDepth = false
|
||||
|
@ -0,0 +1,87 @@
|
||||
# Send to the PICK group
|
||||
connection.primaryGroup = PICK
|
||||
|
||||
# Send amplitudes to this group
|
||||
connection.amplitudeGroup = AMPLITUDE
|
||||
|
||||
# Receive objects from CONFIG group
|
||||
connection.subscriptions = CONFIG
|
||||
|
||||
# The filter used to trigger
|
||||
filter = "RMHP(10)>>ITAPER(30)>>BW(4,0.7,2)>>STALTA(2,80)"
|
||||
|
||||
# The time correction applied to a detected pick
|
||||
timeCorrection = -0.8
|
||||
|
||||
# The record ringbuffer size in seconds
|
||||
ringBufferSize = 300
|
||||
|
||||
# The leadTime defines the time in seconds to
|
||||
# start picking on the streams before current
|
||||
# time
|
||||
leadTime = 60
|
||||
|
||||
# The initTime defines a timespan in seconds
|
||||
# for that the picker is blind after initialization
|
||||
# This time is needed to initialize the filter and
|
||||
# depends on it
|
||||
initTime = 60
|
||||
|
||||
# Interpolate gaps linearly? This is valid for gaps
|
||||
# shorter than thresholds.maxGapLength
|
||||
gapInterpolation = false
|
||||
|
||||
# For which value on a filtered stream is
|
||||
# a pick detected
|
||||
thresholds.triggerOn = 3
|
||||
|
||||
# The value the filtered stream must reach to
|
||||
# enable detection again
|
||||
thresholds.triggerOff = 1.5
|
||||
|
||||
# The maximum gap length to handle. Gaps larger
|
||||
# than this size reset the picker
|
||||
thresholds.maxGapLength = 4.5
|
||||
|
||||
# The timeWindow used to compute a maximum (snr)
|
||||
# amplitude on the filtered stream
|
||||
thresholds.amplMaxTimeWindow = 10
|
||||
|
||||
thresholds.deadTime = 30
|
||||
thresholds.minAmplOffset = 3
|
||||
|
||||
# The amplitudes to compute triggered by
|
||||
# a new P Pick continuously without having
|
||||
# an Origin
|
||||
amplitudes = MLv, mb, mB
|
||||
|
||||
# Configures the picker to use. By default only simple
|
||||
# STALTA detections are emitted as picks. To enable "repicking"
|
||||
# define a picker algorithm here.
|
||||
picker = ""
|
||||
|
||||
# Configures the secondary picker to be used.
|
||||
spicker = ""
|
||||
|
||||
# Configures the feature extraction type to be used
|
||||
fx = ""
|
||||
|
||||
# If enabled, all streams are used for picking that are received by the
|
||||
# picker. This option has only effect if a file is used as input which contains
|
||||
# more data than the picker requests or if amplitudes are enabled which are using
|
||||
# the horizontal components.
|
||||
useAllStreams = false
|
||||
|
||||
# If enabled, all secondary pickers that were triggered by a previous pick
|
||||
# will be terminated when a new detection or pick has been found. This aims to
|
||||
# avoid the case where an S phase is wrongly picked as P but would also be
|
||||
# picked as S by the secondary picker. But suppressing the S pick can lead to
|
||||
# undesired results. It might be better in some situations to have two picks
|
||||
# (P and S) instead of only a wrong P.
|
||||
killPendingSPickers = true
|
||||
|
||||
# If enabled and a picker is configured then detections are sent as well.
|
||||
# To distinguish between detections and picks the evaluation mode of the pick
|
||||
# is set to manual. This is meant to be a debug option which can be used to
|
||||
# compare detections and picks by their evaluation mode.
|
||||
sendDetections = false
|
@ -0,0 +1,2 @@
|
||||
# Messaging subscriptions
|
||||
connection.subscriptions = EVENT, MAGNITUDE, LOCATION, FOCMECH
|
@ -0,0 +1,203 @@
|
||||
# Send to the EVENT group
|
||||
connection.primaryGroup = EVENT
|
||||
|
||||
# Receive objects from LOCATION, MAGNITUDE and FOCMECH group
|
||||
connection.subscriptions = LOCATION, MAGNITUDE, FOCMECH, EVENT
|
||||
|
||||
|
||||
# A magnitude needs at least 4 station magnitudes
|
||||
# to become preferred
|
||||
eventAssociation.minimumMagnitudes = 4
|
||||
|
||||
# An automatic origin will be associated to an
|
||||
# event when it has at least 10 phases
|
||||
eventAssociation.minimumDefiningPhases = 10
|
||||
|
||||
# Minimum score of an automatic origin to be allowed to
|
||||
# form a new Event. This requires an activated score
|
||||
# plugin. See parameter score.
|
||||
# If set, the minimumDefiningPhases has no effect at all, as
|
||||
# this check will be superseded by the score check. It is
|
||||
# the task of the score processor to evaluate a proper
|
||||
# score for all input origins.
|
||||
# By default this option is deactivated.
|
||||
#eventAssociation.minimumScore = 1
|
||||
|
||||
# An automatic origin will be associated to an
|
||||
# event when it falls inside this region.
|
||||
# Format: min-lat, min-lon, max-lat, max-lon
|
||||
#eventAssociation.region.rect = -90,-180,90,180
|
||||
|
||||
# Search 1800 seconds BEFORE origin time of a
|
||||
# new location for matching events
|
||||
eventAssociation.eventTimeBefore = 1800
|
||||
|
||||
# Search 1800 seconds AFTER origin time of a
|
||||
# new location for matching events
|
||||
eventAssociation.eventTimeAfter = 1800
|
||||
|
||||
# An origin will be associated to an existing
|
||||
# event when at least 3 picks matches with
|
||||
# former associated origins
|
||||
eventAssociation.minimumMatchingArrivals = 3
|
||||
|
||||
# If this time window in seconds is negative, pickIDs
|
||||
# are compared to find matching arrivals. A non negative
|
||||
# value (including 0) compares pick times regardless
|
||||
# of the pickID.
|
||||
# Pass: |pick1.time - pick2.time| <= threshold
|
||||
eventAssociation.maximumMatchingArrivalTimeDiff = -1
|
||||
|
||||
# This parameter is only used in conjunction with
|
||||
# eventAssociation.maximumMatchingArrivalTimeDiff. If a station
|
||||
# has multiple associated arrivals for a particular event, this
|
||||
# flag defines if the time distance of a new pick to all arrivals
|
||||
# must be within eventAssociation.maximumMatchingArrivalTimeDiff
|
||||
# or if one matching arrival is enough.
|
||||
eventAssociation.compareAllArrivalTimes = true
|
||||
|
||||
# Associates an origin with an existing event
|
||||
# if the origin time differs not more
|
||||
# than 60 seconds unless the minimumMatchingArrivals
|
||||
# criteria matches.
|
||||
eventAssociation.maximumTimeSpan = 60
|
||||
|
||||
# Associates an origin to an existing event
|
||||
# when the location differs not more
|
||||
# than 5 degrees unless the minimumMatchingArrivals
|
||||
# criteria matches
|
||||
eventAssociation.maximumDistance = 5
|
||||
|
||||
# Minimum number of station magnitudes required for Mw(mB) to be considered as
|
||||
# preferred magnitude.
|
||||
eventAssociation.minMwCount = 8
|
||||
|
||||
# If false then the station count rules out the magnitude priority
|
||||
# which is only taken into account if two magnitudes have the
|
||||
# same station count.
|
||||
#
|
||||
# If true then the priority rules out the station count
|
||||
# which is only taken into account if two magnitudes have the
|
||||
# same priority.
|
||||
eventAssociation.magPriorityOverStationCount = false
|
||||
|
||||
# Minimum number of station magnitudes which ensures that Mw(mB) will be
|
||||
# preferred and not mb.
|
||||
eventAssociation.mbOverMwCount = 30
|
||||
|
||||
# Average between mb and Mw(mB) which must be exceeded to become Mw(mB)
|
||||
# preferred.
|
||||
eventAssociation.mbOverMwValue = 6
|
||||
|
||||
# The magnitude type priority list
|
||||
# Magnitudes with other types cannot become
|
||||
# preferred magnitudes
|
||||
eventAssociation.magTypes = M
|
||||
|
||||
# The agencyID priority list
|
||||
# When the eventtool comes to the point to select a preferred
|
||||
# origin it orders all origins by its
|
||||
# agency priority and selects then the best one among the
|
||||
# highest priority agency.
|
||||
# It also defines the agency priority for custom priority
|
||||
# checks (eventAssociation.priorities)
|
||||
#eventAssociation.agencies = GFZ
|
||||
|
||||
# The author priority list
|
||||
# When the eventtool comes to the point to select a preferred
|
||||
# origin it orders all origins by its
|
||||
# author priority and selects then the best one among the
|
||||
# highest priority author.
|
||||
# It also defines the author priority for custom priority
|
||||
# checks (eventAssociation.priorities)
|
||||
#eventAssociation.authors = scautoloc@localhost
|
||||
|
||||
# The general priority list to decide if an origin becomes preferred. The
|
||||
# priority decreases in the order of the parameters. This list is not used
|
||||
# unless this parameter is activated.
|
||||
# Empty priority list: scevent replicates the default hard wired behaviour:
|
||||
# AGENCY, STATUS, PHASES_AUTOMATIC, TIME_AUTOMATIC
|
||||
# Each item in the list corresponds to a check that is performed. Each check
|
||||
# computes a score of the incoming origin (s1) and the current preferred origin
|
||||
# (s2). If the s1 is lower than s2, the incoming origin is rejected and does
|
||||
# not become preferred. All subsequent checks are ignored. If s1 is equal to
|
||||
# s2, the next check in the list is performed. If s1 is larger than s2, the
|
||||
# origin becomes preferred and all subsequent checks are ignored.
|
||||
# Available tokens:
|
||||
# AGENCY: check based on agency priorities
|
||||
# AUTHOR: check based on author priorities
|
||||
# MODE: evaluation mode priority: 0 = unset, 1 = automatic, 2 = manual, manual
|
||||
# over-rules automatic
|
||||
# STATUS: priority combined from evaluation status and evaluation mode: -100 =
|
||||
# status is rejected, -1 = status is reported, 0 = status is preliminary or
|
||||
# status is unset and mode is automatic, 1 = status is confirmed or status is
|
||||
# unset and mode is manual, 2 = status is reviewed, 3 = status is final,
|
||||
# METHOD: check based on the method priorities
|
||||
# PHASES: higher phase count = higher priority
|
||||
# PHASES_AUTOMATIC: only checks phase priorities for incoming automatic origins
|
||||
# RMS: lower rms = higher priority
|
||||
# RMS_AUTOMATIC: only check RMS on incoming automatic origins
|
||||
# TIME: more recent origins (creationTime) have higher priorities
|
||||
# TIME_AUTOMATIC: only check creationTime priority on incoming automatic
|
||||
# origins
|
||||
# SCORE: evaluates the score according to a configured ScoreProcessor and
|
||||
# prefers the origin/focalmechanism with the highest score
|
||||
#eventAssociation.priorities = AGENCY, STATUS, PHASES_AUTOMATIC, TIME_AUTOMATIC
|
||||
|
||||
# If true, one magnitude will be preferred even if magnitude criteria are
|
||||
# not fulfilled.
|
||||
eventAssociation.enableFallbackMagnitude = false
|
||||
|
||||
# The eventID prefix
|
||||
# The eventID format is [prefix][year][code], e.g. gfz2008fdvg
|
||||
eventIDPrefix = "gfz"
|
||||
|
||||
# Defines the pattern to generate an event ID.
|
||||
# %p : prefix
|
||||
# %Y : year
|
||||
# %[w]c: alpha character
|
||||
# %[w]C: upper case alpha character
|
||||
# %[w]d: decimal
|
||||
# %[w]x: hexadecimal
|
||||
# %[w]X: upper case hexadecimal
|
||||
eventIDPattern = "%p%Y%04c"
|
||||
|
||||
# Configures the number of event ID slots to look back and forth when an event
|
||||
# ID is already taken. The default in previous versions was 5. Now -1 means
|
||||
# that the margin is determined automatically based on
|
||||
# "eventAssociation.eventTimeBefore" and "eventAssociation.eventTimeAfter".
|
||||
# According to the configured "eventIDPattern" a fixed time range per slot can
|
||||
# be computed and with that width the number of look ahead slots and look back
|
||||
# slots can be computed based on the given time ranges for event association.
|
||||
eventIDLookupMargin = -1
|
||||
|
||||
# Configures a timespan in seconds to delay origin association
|
||||
#eventAssociation.delayTimeSpan = 0
|
||||
|
||||
# AgencyID filter used to delay origin association if
|
||||
# eventAssociation.delayTimeSpan > 0
|
||||
#eventAssociation.delayFilter.agencyID = agency
|
||||
|
||||
# Author filter used to delay origin association if
|
||||
# eventAssociation.delayTimeSpan > 0
|
||||
#eventAssociation.delayFilter.author = author
|
||||
|
||||
# evaluationMode filter used to delay origin association if
|
||||
# eventAssociation.delayTimeSpan > 0. Allowed values are "manual" or "automatic"
|
||||
#eventAssociation.delayFilter.evaluationMode = automatic
|
||||
|
||||
# Defines whether to associate or to ignore origins derived from CMT/MT
|
||||
# inversions.
|
||||
eventAssociation.ignoreFMDerivedOrigins = true
|
||||
|
||||
# If the preferred origin has evaluation status 'rejected' the event type will
|
||||
# be set as 'not existing' unless the event type has been fixed by an operator
|
||||
# or the preferred origin has been fixed.
|
||||
eventAssociation.declareFakeEventForRejectedOrigin = false
|
||||
|
||||
# Allows to match picks that are associated with weight 0
|
||||
eventAssociation.allowLooseAssociatedArrivals = false
|
||||
|
||||
# If enabled then the EventDescription with type 'Flinn-Engdahl region'
|
||||
# will be populated with the Flinn-Engdahl region name.
|
||||
populateFERegion = false
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue