[seiscomp, scanloc] Install, add .gitignore
This commit is contained in:
4
lib/python/seiscomp/__init__.py
Normal file
4
lib/python/seiscomp/__init__.py
Normal file
@ -0,0 +1,4 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.setdlopenflags(os.RTLD_LAZY | os.RTLD_GLOBAL)
|
BIN
lib/python/seiscomp/__pycache__/__init__.cpython-312.pyc
Normal file
BIN
lib/python/seiscomp/__pycache__/__init__.cpython-312.pyc
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/__pycache__/config.cpython-312.pyc
Normal file
BIN
lib/python/seiscomp/__pycache__/config.cpython-312.pyc
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/__pycache__/kernel.cpython-312.pyc
Normal file
BIN
lib/python/seiscomp/__pycache__/kernel.cpython-312.pyc
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/__pycache__/shell.cpython-312.pyc
Normal file
BIN
lib/python/seiscomp/__pycache__/shell.cpython-312.pyc
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_client.so
Normal file
BIN
lib/python/seiscomp/_client.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_config.so
Normal file
BIN
lib/python/seiscomp/_config.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_core.so
Normal file
BIN
lib/python/seiscomp/_core.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_geo.so
Normal file
BIN
lib/python/seiscomp/_geo.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_io.so
Normal file
BIN
lib/python/seiscomp/_io.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_logging.so
Normal file
BIN
lib/python/seiscomp/_logging.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_math.so
Normal file
BIN
lib/python/seiscomp/_math.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_seismology.so
Normal file
BIN
lib/python/seiscomp/_seismology.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_system.so
Normal file
BIN
lib/python/seiscomp/_system.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_utils.so
Normal file
BIN
lib/python/seiscomp/_utils.so
Normal file
Binary file not shown.
560
lib/python/seiscomp/bindings2cfg.py
Normal file
560
lib/python/seiscomp/bindings2cfg.py
Normal file
@ -0,0 +1,560 @@
|
||||
############################################################################
|
||||
# Copyright (C) gempa GmbH #
|
||||
# All rights reserved. #
|
||||
# Contact: gempa GmbH (seiscomp-dev@gempa.de) #
|
||||
# #
|
||||
# GNU Affero General Public License Usage #
|
||||
# This file may be used under the terms of the GNU Affero #
|
||||
# Public License version 3.0 as published by the Free Software Foundation #
|
||||
# and appearing in the file LICENSE included in the packaging of this #
|
||||
# file. Please review the following information to ensure the GNU Affero #
|
||||
# Public License version 3.0 requirements will be met: #
|
||||
# https://www.gnu.org/licenses/agpl-3.0.html. #
|
||||
# #
|
||||
# Other Usage #
|
||||
# Alternatively, this file may be used in accordance with the terms and #
|
||||
# conditions contained in a signed written agreement between you and #
|
||||
# gempa GmbH. #
|
||||
############################################################################
|
||||
|
||||
import os, time, sys
|
||||
import seiscomp.core, seiscomp.client, seiscomp.datamodel
|
||||
import seiscomp.io, seiscomp.system
|
||||
|
||||
|
||||
def collectParams(container):
    """Recursively gather all defined parameters of a binding container.

    Walks groups, structures and plain parameters of *container* and returns
    a flat dict mapping each parameter's variable name to its value, with
    multi-valued symbols joined by commas.  Parameters whose symbol stage is
    CS_UNDEFINED are skipped.
    """
    collected = {}

    # Descend into nested groups first, then structures.
    for idx in range(container.groupCount()):
        collected.update(collectParams(container.group(idx)))
    for idx in range(container.structureCount()):
        collected.update(collectParams(container.structure(idx)))

    # Finally pick up the parameters defined directly on this container.
    for idx in range(container.parameterCount()):
        param = container.parameter(idx)
        if param.symbol.stage == seiscomp.system.Environment.CS_UNDEFINED:
            # Not defined in any configuration stage — ignore.
            continue
        collected[param.variableName] = ",".join(param.symbol.values)

    return collected
|
||||
|
||||
|
||||
def collect(idset, paramSetID):
    """Mark *paramSetID* and every base parameter set it derives from as used.

    Follows the baseID() chain iteratively, inserting each resolved public ID
    into *idset* (value 1), and stops at the first unresolvable or empty ID.
    """
    currentID = paramSetID
    while True:
        paramSet = seiscomp.datamodel.ParameterSet.Find(currentID)
        if not paramSet:
            return
        idset[paramSet.publicID()] = 1
        currentID = paramSet.baseID()
        if not currentID:
            return
|
||||
|
||||
|
||||
def sync(paramSet, params):
    """Synchronize the database ParameterSet *paramSet* with the desired
    key/value mapping *params*.

    Existing parameters are updated in place when their value changed,
    duplicates and parameters absent from *params* are detached, and keys
    not yet present are created and added.
    """
    obsoleteParams = []
    seenParams = {}
    i = 0
    # Manual index loop because p.detach() removes the parameter from the
    # set and shifts subsequent indices — i must only advance when the
    # current element is kept.
    while i < paramSet.parameterCount():
        p = paramSet.parameter(i)
        if p.name() in params:
            if p.name() in seenParams:
                # Multiple parameter definitions with same name
                sys.stderr.write(
                    f"- {p.publicID()}:{p.name()} / duplicate parameter name\n"
                )
                # detach() shrinks the set, so do NOT increment i here.
                p.detach()
                continue
            seenParams[p.name()] = 1
            val = params[p.name()]
            if val != p.value():
                # Value changed: write it back and flag the object dirty.
                p.setValue(val)
                p.update()
        else:
            # Not wanted anymore; remember it and remove after the scan so
            # the index arithmetic above stays simple.
            obsoleteParams.append(p)
        i = i + 1

    for p in obsoleteParams:
        p.detach()

    # Create parameters for all keys that were not found in the set.
    for key, val in list(params.items()):
        if key in seenParams:
            continue
        p = seiscomp.datamodel.Parameter.Create()
        p.setName(key)
        p.setValue(val)
        paramSet.add(p)
|
||||
|
||||
|
||||
class ConfigDBUpdater(seiscomp.client.Application):
|
||||
def __init__(self, argc, argv):
    """Configure the application: stderr logging, messaging and database
    enabled, notifier auto-application disabled (this tool creates the
    notifiers itself), and loading of all configuration modules."""
    seiscomp.client.Application.__init__(self, argc, argv)
    self.setLoggingToStdErr(True)
    self.setMessagingEnabled(True)
    self.setDatabaseEnabled(True, True)
    self.setAutoApplyNotifierEnabled(False)
    self.setInterpretNotifierEnabled(False)
    self.setMessagingUsername("_sccfgupd_")
    self.setLoadConfigModuleEnabled(True)
    # Load all configuration modules
    self.setConfigModuleName("")
    self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)

    # Command-line derived state, filled in validateParameters():
    self._moduleName = None       # --module-name value, if given
    self._outputFile = None       # --output/-o target file, if given
    self._createNotifier = False  # --create-notifier flag
    self._keyDir = None           # --key-dir override, if given
|
||||
|
||||
def createCommandLineDescription(self):
    """Register the command-line options of this tool.

    Adds the "Input" group (--key-dir) and the "Output" group
    (--module-name, --output/-o, --create-notifier).
    """
    self.commandline().addGroup("Input")
    self.commandline().addStringOption(
        "Input",
        "key-dir",
        "Overrides the location of the default key directory ($SEISCOMP_ROOT/etc/key)",
    )
    self.commandline().addGroup("Output")
    self.commandline().addStringOption(
        "Output",
        "module-name",
        "The module name to be used for the config module. If not given then "
        "the application name is being used or 'trunk' if output to a file "
        "is enabled",
    )
    self.commandline().addStringOption(
        "Output", "output,o", "If given, an output XML file is generated"
    )
    # Fixed help-text typo: "requires and input database" -> "an input database".
    self.commandline().addOption(
        "Output",
        "create-notifier",
        "If given then a notifier message containing all notifiers "
        "will be written to the output XML. This option only applies "
        "if an output file is given. Notifier creation either requires "
        "an input database and an input config XML as reference.",
    )
|
||||
|
||||
def validateParameters(self):
    """Evaluate the parsed command line and adjust the application mode.

    When an output file is requested the tool switches to offline mode
    (messaging disabled; database only re-enabled read-only when notifier
    creation needs a reference).  Returns False when the base class
    rejects the parameters, True otherwise.
    """
    if not seiscomp.client.Application.validateParameters(self):
        return False

    # optionString() raises when the option was not given on the command
    # line; each option is optional, so the absence is simply ignored.
    # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt are
    # no longer swallowed.
    try:
        self._moduleName = self.commandline().optionString("module-name")
    except Exception:
        pass

    try:
        self._outputFile = self.commandline().optionString("output")
        self._createNotifier = self.commandline().hasOption("create-notifier")
        # Switch to offline mode
        self.setMessagingEnabled(False)
        self.setDatabaseEnabled(False, False)
        if self._createNotifier:
            # Notifier creation needs the existing configuration as
            # reference, so keep read-only database access if available.
            if self.isConfigDatabaseEnabled():
                self.setDatabaseEnabled(True, False)
        else:
            self.setLoadConfigModuleEnabled(False)
    except Exception:
        pass

    try:
        self._keyDir = self.commandline().optionString("key-dir")
    except Exception:
        pass

    return True
|
||||
|
||||
def init(self):
    """Load the configuration schema and all module bindings.

    Builds self.bindingMods (names of non-standalone modules) and
    self.stationSetups, a dict mapping (networkCode, stationCode) to a
    per-module dict of flattened binding parameters.  Returns False on
    any fatal problem so the application aborts before run().
    """
    if not seiscomp.client.Application.init(self):
        return False

    # Initialize the basic directories
    filebase = seiscomp.system.Environment.Instance().installDir()
    descdir = os.path.join(filebase, "etc", "descriptions")

    # Load definitions of the configuration schema
    defs = seiscomp.system.SchemaDefinitions()
    if not defs.load(descdir):
        print("Error: could not read descriptions", file=sys.stderr)
        return False

    if defs.moduleCount() == 0:
        print("Warning: no modules defined, nothing to do", file=sys.stderr)
        return False

    # Create a model from the schema and read its configuration including
    # all bindings.
    model = seiscomp.system.Model()
    if self._keyDir:
        # Honour --key-dir: read key files from the given directory.
        model.keyDirOverride = self._keyDir
    model.create(defs)
    model.readConfig()

    # Find all binding mods for trunk. Bindings of modules where standalone
    # is set to true are ignored. They are supposed to handle their bindings
    # on their own.
    self.bindingMods = []
    for i in range(defs.moduleCount()):
        mod = defs.module(i)
        # Ignore stand alone modules (eg seedlink, slarchive, ...) as they
        # are not using the trunk libraries and don't need database
        # configurations
        if mod.isStandalone():
            continue

        self.bindingMods.append(mod.name)

    if len(self.bindingMods) == 0:
        print("Warning: no usable modules found, nothing to do", file=sys.stderr)
        return False

    self.stationSetups = {}

    # Read bindings
    for m in self.bindingMods:
        mod = model.module(m)
        if not mod:
            print(f"Warning: module {m} not assigned", file=sys.stderr)
            continue
        if len(mod.bindings) == 0:
            continue

        # Module names end up in database identifiers which are limited
        # in length; reject anything that would not fit.
        if len(m) > 20:
            print(
                f"Error: rejecting module {m} - name is longer than 20 characters",
                file=sys.stderr,
            )
            return False

        # Rename global to default for being compatible with older
        # releases
        if m == "global":
            m = "default"

        print(f"+ {m}", file=sys.stderr)

        for staid in list(mod.bindings.keys()):
            binding = mod.getBinding(staid)
            if not binding:
                continue
            # sys.stderr.write(" + %s.%s\n" % (staid.networkCode, staid.stationCode))
            params = {}
            # Flatten every section of the binding into one params dict.
            for i in range(binding.sectionCount()):
                params.update(collectParams(binding.section(i)))
            key = (staid.networkCode, staid.stationCode)
            if not key in self.stationSetups:
                self.stationSetups[key] = {}
            self.stationSetups[key][m] = params
        print(
            f" + read {len(list(mod.bindings.keys()))} stations", file=sys.stderr
        )

    return True
|
||||
|
||||
def printUsage(self):
    """Print usage header, the inherited option listing and examples."""
    print(
        """Usage:
  bindings2cfg [options]

Synchronize bindings from key files with processing system or output as
configuration XML file"""
    )

    # Let the base class print the registered command-line options.
    seiscomp.client.Application.printUsage(self)

    print(
        """Examples:
Write bindings configuration from key directory to a configuration XML file:
  bindings2cfg --key-dir ./etc/key -o config.xml

Synchronize bindings configuration from key directory to a processing system
  bindings2cfg --key-dir ./etc/key -H proc
"""
    )

    return True
|
||||
|
||||
def send(self, *args):
    """Send a message over the application's connection, retrying once per
    second until the send succeeds."""
    while True:
        if self.connection().send(*args):
            return
        print("Warning: sending failed, retrying", file=sys.stderr)
        time.sleep(1)
|
||||
|
||||
def run(self):
    """Reimplements the main loop of the application.

    Collects all bindings and updates the database: already existing
    objects are updated in place, missing ones are created, and objects
    that were not touched are removed.  This tool is the only one that
    should write the configuration into the database and thus manages
    the content.  In offline mode (--output) the resulting configuration
    (or a notifier message with --create-notifier) is written to an XML
    file instead of being sent to the messaging system.
    """
    config = seiscomp.client.ConfigDB.Instance().config()
    if config is None:
        config = seiscomp.datamodel.Config()

    configMod = None
    obsoleteConfigMods = []
    moduleName = self._moduleName

    # Online mode (or notifier creation) defaults the module name to the
    # application name and enables notifier recording; plain file output
    # defaults to "trunk".
    if self._outputFile is None or self._createNotifier:
        if not moduleName:
            moduleName = self.name()
        seiscomp.datamodel.Notifier.Enable()
    else:
        if not moduleName:
            moduleName = "trunk"

    configID = f"Config/{moduleName}"

    for i in range(config.configModuleCount()):
        if config.configModule(i).publicID() != configID:
            obsoleteConfigMods.append(config.configModule(i))
        else:
            configMod = config.configModule(i)

    # Remove obsolete config modules
    for cm in obsoleteConfigMods:
        print(f"- {cm.name()} / obsolete module configuration", file=sys.stderr)
        ps = seiscomp.datamodel.ParameterSet.Find(cm.parameterSetID())
        if ps is not None:
            ps.detach()
        cm.detach()
    del obsoleteConfigMods

    if not configMod:
        # Try to find the config module in the global pool, otherwise
        # create a fresh one.
        configMod = seiscomp.datamodel.ConfigModule.Find(configID)
        if configMod is None:
            configMod = seiscomp.datamodel.ConfigModule.Create(configID)
            config.add(configMod)
        else:
            if configMod.name() != moduleName:
                configMod.update()
            if not configMod.enabled():
                configMod.update()

        configMod.setName(moduleName)
        configMod.setEnabled(True)
    else:
        if configMod.name() != moduleName:
            configMod.setName(moduleName)
            configMod.update()
        # A module-level parameter set is not used anymore; detach it.
        paramSet = seiscomp.datamodel.ParameterSet.Find(configMod.parameterSetID())
        if configMod.parameterSetID():
            configMod.setParameterSetID("")
            configMod.update()

        if paramSet is not None:
            paramSet.detach()

    # Partition existing station configs into kept and obsolete ones.
    stationConfigs = {}
    obsoleteStationConfigs = []

    for i in range(configMod.configStationCount()):
        cs = configMod.configStation(i)
        if (cs.networkCode(), cs.stationCode()) in self.stationSetups:
            stationConfigs[(cs.networkCode(), cs.stationCode())] = cs
        else:
            obsoleteStationConfigs.append(cs)

    for cs in obsoleteStationConfigs:
        print(
            f"- {configMod.name()}/{cs.networkCode()}/{cs.stationCode()} / obsolete "
            "station configuration",
            file=sys.stderr,
        )
        cs.detach()
    del obsoleteStationConfigs

    for staid, setups in list(self.stationSetups.items()):
        try:
            cs = stationConfigs[staid]
        except KeyError:
            # No existing config for this station: reuse a pooled object
            # or create a new one.
            cs = seiscomp.datamodel.ConfigStation.Find(
                f"Config/{configMod.name()}/{staid[0]}/{staid[1]}"
            )
            if not cs:
                cs = seiscomp.datamodel.ConfigStation.Create(
                    f"Config/{configMod.name()}/{staid[0]}/{staid[1]}"
                )
                configMod.add(cs)
            cs.setNetworkCode(staid[0])
            cs.setStationCode(staid[1])
            cs.setEnabled(True)

            ci = seiscomp.datamodel.CreationInfo()
            ci.setCreationTime(seiscomp.core.Time.GMT())
            ci.setAgencyID(self.agencyID())
            ci.setAuthor(self.name())
            cs.setCreationInfo(ci)

        # Partition this station's setups into kept and obsolete ones.
        stationSetups = {}
        obsoleteSetups = []
        for i in range(cs.setupCount()):
            setup = cs.setup(i)
            if setup.name() in setups:
                stationSetups[setup.name()] = setup
            else:
                obsoleteSetups.append(setup)

        for s in obsoleteSetups:
            # BUGFIX: was setup.name() — a stale variable left over from
            # the previous loop — now reports the detached setup itself.
            print(
                f"- {configMod.name()}/{cs.networkCode()}/{cs.stationCode()}/{s.name()} "
                "/ obsolete station setup",
                file=sys.stderr,
            )
            ps = seiscomp.datamodel.ParameterSet.Find(s.parameterSetID())
            if ps:
                ps.detach()
            s.detach()
        del obsoleteSetups

        newParamSets = {}
        globalSet = ""
        for mod, params in list(setups.items()):
            try:
                setup = stationSetups[mod]
            except KeyError:
                setup = seiscomp.datamodel.Setup()
                setup.setName(mod)
                setup.setEnabled(True)
                cs.add(setup)

            paramSet = seiscomp.datamodel.ParameterSet.Find(setup.parameterSetID())
            if not paramSet:
                paramSet = seiscomp.datamodel.ParameterSet.Find(
                    "ParameterSet/%s/Station/%s/%s/%s"
                    % (
                        configMod.name(),
                        cs.networkCode(),
                        cs.stationCode(),
                        setup.name(),
                    )
                )
                if not paramSet:
                    paramSet = seiscomp.datamodel.ParameterSet.Create(
                        "ParameterSet/%s/Station/%s/%s/%s"
                        % (
                            configMod.name(),
                            cs.networkCode(),
                            cs.stationCode(),
                            setup.name(),
                        )
                    )
                    config.add(paramSet)
                paramSet.setModuleID(configMod.publicID())
                paramSet.setCreated(seiscomp.core.Time.GMT())
                newParamSets[paramSet.publicID()] = 1
                setup.setParameterSetID(paramSet.publicID())
                if mod in stationSetups:
                    setup.update()
            elif paramSet.moduleID() != configMod.publicID():
                paramSet.setModuleID(configMod.publicID())
                paramSet.update()

            # Synchronize existing parameterset with the new parameters
            sync(paramSet, params)

            if setup.name() == "default":
                globalSet = paramSet.publicID()

        # Rebase all setups of this station onto the "default" set.
        for i in range(cs.setupCount()):
            setup = cs.setup(i)
            paramSet = seiscomp.datamodel.ParameterSet.Find(setup.parameterSetID())
            if not paramSet:
                continue

            if paramSet.publicID() != globalSet and paramSet.baseID() != globalSet:
                paramSet.setBaseID(globalSet)
                if paramSet.publicID() not in newParamSets:
                    paramSet.update()

    # Collect unused ParameterSets
    usedSets = {}
    for i in range(config.configModuleCount()):
        configMod = config.configModule(i)
        for j in range(configMod.configStationCount()):
            cs = configMod.configStation(j)
            for k in range(cs.setupCount()):
                setup = cs.setup(k)
                collect(usedSets, setup.parameterSetID())

    # Delete unused ParameterSets
    i = 0
    while i < config.parameterSetCount():
        paramSet = config.parameterSet(i)
        if paramSet.publicID() not in usedSets:
            print(
                f"- {paramSet.publicID()} / obsolete parameter set", file=sys.stderr
            )
            # detach() shifts subsequent indices, so only advance i in the
            # else branch.
            paramSet.detach()
        else:
            i = i + 1

    # Generate output file and exit if configured
    if self._outputFile is not None:
        ar = seiscomp.io.XMLArchive()
        if not ar.create(self._outputFile):
            print(
                f"Failed to created output file: {self._outputFile}",
                file=sys.stderr,
            )
            return False

        ar.setFormattedOutput(True)
        if self._createNotifier:
            nmsg = seiscomp.datamodel.Notifier.GetMessage(True)
            ar.writeObject(nmsg)
        else:
            ar.writeObject(config)
        ar.close()
        return True

    ncount = seiscomp.datamodel.Notifier.Size()
    if ncount > 0:
        print(f"+ synchronize {ncount} change(s)", file=sys.stderr)
    else:
        print("- database is already up-to-date", file=sys.stderr)
        return True

    # Announce the start of the synchronization to the status group.
    cfgmsg = seiscomp.datamodel.ConfigSyncMessage(False)
    cfgmsg.setCreationInfo(seiscomp.datamodel.CreationInfo())
    cfgmsg.creationInfo().setCreationTime(seiscomp.core.Time.GMT())
    cfgmsg.creationInfo().setAuthor(self.author())
    cfgmsg.creationInfo().setAgencyID(self.agencyID())
    self.send(seiscomp.client.Protocol.STATUS_GROUP, cfgmsg)

    # Send messages in a batch of 100 notifiers to not exceed the
    # maximum allowed message size of ~300kb.
    msg = seiscomp.datamodel.NotifierMessage()
    nmsg = seiscomp.datamodel.Notifier.GetMessage(False)
    count = 0
    sys.stderr.write("\r + sending notifiers: %d%%" % (count * 100 / ncount))
    sys.stderr.flush()
    while nmsg:
        for o in nmsg:
            n = seiscomp.datamodel.Notifier.Cast(o)
            if n:
                msg.attach(n)

        if msg.size() >= 100:
            count += msg.size()
            self.send("CONFIG", msg)
            msg.clear()
            sys.stderr.write(
                "\r + sending notifiers: %d%%" % (count * 100 / ncount)
            )
            sys.stderr.flush()

        nmsg = seiscomp.datamodel.Notifier.GetMessage(False)

    if msg.size() > 0:
        count += msg.size()
        self.send("CONFIG", msg)
        msg.clear()
        sys.stderr.write("\r + sending notifiers: %d%%" % (count * 100 / ncount))
        sys.stderr.flush()

    sys.stderr.write("\n")

    # Notify about end of synchronization
    cfgmsg.creationInfo().setCreationTime(seiscomp.core.Time.GMT())
    cfgmsg.isFinished = True
    self.send(seiscomp.client.Protocol.STATUS_GROUP, cfgmsg)

    return True
|
||||
|
||||
|
||||
def main():
    """Build the updater from the process arguments and execute it,
    returning its exit code."""
    return ConfigDBUpdater(len(sys.argv), sys.argv)()
|
1984
lib/python/seiscomp/client.py
Normal file
1984
lib/python/seiscomp/client.py
Normal file
File diff suppressed because it is too large
Load Diff
857
lib/python/seiscomp/config.py
Normal file
857
lib/python/seiscomp/config.py
Normal file
@ -0,0 +1,857 @@
|
||||
# This file was automatically generated by SWIG (http://www.swig.org).
|
||||
# Version 4.0.2
|
||||
#
|
||||
# Do not make changes to this file unless you know what you are doing--modify
|
||||
# the SWIG interface file instead.
|
||||
|
||||
from sys import version_info as _swig_python_version_info
|
||||
if _swig_python_version_info < (2, 7, 0):
|
||||
raise RuntimeError("Python 2.7 or later required")
|
||||
|
||||
# Import the low-level C/C++ module
|
||||
if __package__ or "." in __name__:
|
||||
from . import _config
|
||||
else:
|
||||
import _config
|
||||
|
||||
try:
|
||||
import builtins as __builtin__
|
||||
except ImportError:
|
||||
import __builtin__
|
||||
|
||||
def _swig_repr(self):
|
||||
try:
|
||||
strthis = "proxy of " + self.this.__repr__()
|
||||
except __builtin__.Exception:
|
||||
strthis = ""
|
||||
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
|
||||
|
||||
|
||||
def _swig_setattr_nondynamic_instance_variable(set):
|
||||
def set_instance_attr(self, name, value):
|
||||
if name == "thisown":
|
||||
self.this.own(value)
|
||||
elif name == "this":
|
||||
set(self, name, value)
|
||||
elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
|
||||
set(self, name, value)
|
||||
else:
|
||||
raise AttributeError("You cannot add instance attributes to %s" % self)
|
||||
return set_instance_attr
|
||||
|
||||
|
||||
def _swig_setattr_nondynamic_class_variable(set):
|
||||
def set_class_attr(cls, name, value):
|
||||
if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
|
||||
set(cls, name, value)
|
||||
else:
|
||||
raise AttributeError("You cannot add class attributes to %s" % cls)
|
||||
return set_class_attr
|
||||
|
||||
|
||||
def _swig_add_metaclass(metaclass):
|
||||
"""Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
|
||||
def wrapper(cls):
|
||||
return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
|
||||
return wrapper
|
||||
|
||||
|
||||
class _SwigNonDynamicMeta(type):
    """Meta class to enforce nondynamic attributes (no new attributes) for a class"""
    # Route class-attribute assignment through the guard so new class
    # attributes are rejected while existing non-property ones stay writable.
    __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
|
||||
|
||||
|
||||
import weakref
|
||||
|
||||
class SwigPyIterator(object):
    """SWIG-generated abstract iterator over C++ container wrappers.

    All methods delegate to the `_config` extension module; do not edit
    by hand (see the file header).
    """
    # Ownership flag of the underlying C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        # Abstract: only concrete iterators from _config may be instantiated.
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _config.delete_SwigPyIterator

    def value(self):
        return _config.SwigPyIterator_value(self)

    def incr(self, n=1):
        return _config.SwigPyIterator_incr(self, n)

    def decr(self, n=1):
        return _config.SwigPyIterator_decr(self, n)

    def distance(self, x):
        return _config.SwigPyIterator_distance(self, x)

    def equal(self, x):
        return _config.SwigPyIterator_equal(self, x)

    def copy(self):
        return _config.SwigPyIterator_copy(self)

    def next(self):
        return _config.SwigPyIterator_next(self)

    def __next__(self):
        return _config.SwigPyIterator___next__(self)

    def previous(self):
        return _config.SwigPyIterator_previous(self)

    def advance(self, n):
        return _config.SwigPyIterator_advance(self, n)

    def __eq__(self, x):
        return _config.SwigPyIterator___eq__(self, x)

    def __ne__(self, x):
        return _config.SwigPyIterator___ne__(self, x)

    def __iadd__(self, n):
        return _config.SwigPyIterator___iadd__(self, n)

    def __isub__(self, n):
        return _config.SwigPyIterator___isub__(self, n)

    def __add__(self, n):
        return _config.SwigPyIterator___add__(self, n)

    def __sub__(self, *args):
        return _config.SwigPyIterator___sub__(self, *args)
    def __iter__(self):
        return self

# Register SwigPyIterator in _config:
_config.SwigPyIterator_swigregister(SwigPyIterator)
|
||||
|
||||
# Log severity constants re-exported from the C extension (used by Logger.log).
ERROR = _config.ERROR
WARNING = _config.WARNING
INFO = _config.INFO
DEBUG = _config.DEBUG
|
||||
class Logger(object):
    """SWIG-generated director class for receiving configuration log output.

    Subclass and override log() in Python; the C++ side calls back into the
    subclass via the director mechanism.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr
    __swig_destroy__ = _config.delete_Logger

    def log(self, arg0, filename, line, msg):
        return _config.Logger_log(self, arg0, filename, line, msg)

    def __init__(self):
        # Director pattern: pass the Python instance to C++ only for
        # subclasses so overridden methods are called back.
        if self.__class__ == Logger:
            _self = None
        else:
            _self = self
        _config.Logger_swiginit(self, _config.new_Logger(_self, ))
    def __disown__(self):
        # Transfer ownership of the C++ object to the C++ side; return a
        # weak proxy to avoid a reference cycle.
        self.this.disown()
        _config.disown_Logger(self)
        return weakref.proxy(self)

# Register Logger in _config:
_config.Logger_swigregister(Logger)
|
||||
|
||||
class Exception(object):
    """SWIG-generated wrapper of the C++ Seiscomp::Config exception base.

    NOTE(review): intentionally shadows the builtin `Exception` inside this
    module — generated code, do not rename.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _config.Exception_swiginit(self, _config.new_Exception(*args))
    __swig_destroy__ = _config.delete_Exception

    def what(self):
        # Human-readable error description from the C++ exception.
        return _config.Exception_what(self)

# Register Exception in _config:
_config.Exception_swigregister(Exception)
# Module-level C++ global variables exposed by SWIG.
cvar = _config.cvar
|
||||
|
||||
class OptionNotFoundException(Exception):
    """SWIG wrapper: raised when a requested configuration option does not exist."""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _config.OptionNotFoundException_swiginit(self, _config.new_OptionNotFoundException(*args))
    __swig_destroy__ = _config.delete_OptionNotFoundException

# Register OptionNotFoundException in _config:
_config.OptionNotFoundException_swigregister(OptionNotFoundException)
|
||||
|
||||
class TypeConversionException(Exception):
    """SWIG wrapper: raised when an option value cannot be converted to the requested type."""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _config.TypeConversionException_swiginit(self, _config.new_TypeConversionException(*args))
    __swig_destroy__ = _config.delete_TypeConversionException

# Register TypeConversionException in _config:
_config.TypeConversionException_swigregister(TypeConversionException)
|
||||
|
||||
class SyntaxException(Exception):
    """SWIG wrapper: raised on configuration file syntax errors."""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _config.SyntaxException_swiginit(self, _config.new_SyntaxException(*args))
    __swig_destroy__ = _config.delete_SyntaxException

# Register SyntaxException in _config:
_config.SyntaxException_swigregister(SyntaxException)
|
||||
|
||||
class CaseSensitivityException(Exception):
    """SWIG wrapper: raised when names differ only in case and the
    case-sensitivity check is enabled."""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _config.CaseSensitivityException_swiginit(self, _config.new_CaseSensitivityException(*args))
    __swig_destroy__ = _config.delete_CaseSensitivityException

# Register CaseSensitivityException in _config:
_config.CaseSensitivityException_swigregister(CaseSensitivityException)
|
||||
|
||||
class Symbol(object):
    """SWIG-generated wrapper of a single configuration symbol (one
    name/value entry with its namespace, origin URI, comment and stage)."""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _config.Symbol_swiginit(self, _config.new_Symbol(*args))

    def set(self, name, ns, values, uri, comment, stage=-1):
        return _config.Symbol_set(self, name, ns, values, uri, comment, stage)

    def __eq__(self, symbol):
        return _config.Symbol___eq__(self, symbol)

    def toString(self):
        return _config.Symbol_toString(self)
    # Attribute accessors bridged to the C++ struct members.
    name = property(_config.Symbol_name_get, _config.Symbol_name_set)
    ns = property(_config.Symbol_ns_get, _config.Symbol_ns_set)
    content = property(_config.Symbol_content_get, _config.Symbol_content_set)
    values = property(_config.Symbol_values_get, _config.Symbol_values_set)
    uri = property(_config.Symbol_uri_get, _config.Symbol_uri_set)
    comment = property(_config.Symbol_comment_get, _config.Symbol_comment_set)
    stage = property(_config.Symbol_stage_get, _config.Symbol_stage_set)
    line = property(_config.Symbol_line_get, _config.Symbol_line_set)
    __swig_destroy__ = _config.delete_Symbol

# Register Symbol in _config:
_config.Symbol_swigregister(Symbol)
|
||||
|
||||
class SymbolTable(object):
    """SWIG-generated wrapper of the configuration symbol table: the set of
    parsed Symbol objects plus the list of included files."""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self):
        _config.SymbolTable_swiginit(self, _config.new_SymbolTable())

    def setCaseSensitivityCheck(self, arg2):
        return _config.SymbolTable_setCaseSensitivityCheck(self, arg2)

    def setLogger(self, arg2):
        return _config.SymbolTable_setLogger(self, arg2)

    def logger(self):
        return _config.SymbolTable_logger(self)

    def add(self, *args):
        return _config.SymbolTable_add(self, *args)

    def get(self, *args):
        return _config.SymbolTable_get(self, *args)

    def remove(self, name):
        return _config.SymbolTable_remove(self, name)

    def incrementObjectCount(self):
        return _config.SymbolTable_incrementObjectCount(self)

    def decrementObjectCount(self):
        return _config.SymbolTable_decrementObjectCount(self)

    def objectCount(self):
        return _config.SymbolTable_objectCount(self)

    def toString(self):
        return _config.SymbolTable_toString(self)

    def hasFileBeenIncluded(self, fileName):
        return _config.SymbolTable_hasFileBeenIncluded(self, fileName)

    def addToIncludedFiles(self, fileName):
        return _config.SymbolTable_addToIncludedFiles(self, fileName)

    def includesBegin(self):
        return _config.SymbolTable_includesBegin(self)

    def includesEnd(self):
        return _config.SymbolTable_includesEnd(self)

    def begin(self):
        return _config.SymbolTable_begin(self)

    def end(self):
        return _config.SymbolTable_end(self)
    __swig_destroy__ = _config.delete_SymbolTable

# Register SymbolTable in _config:
_config.SymbolTable_swigregister(SymbolTable)
|
||||
|
||||
class Config(object):
    """SWIG proxy of the C++ ``Seiscomp::Config::Config`` class.

    Reads, writes and evaluates SeisComP configuration files.  Typed
    accessors (``getInt``/``setInt`` and friends) exist for scalar and list
    values; every method forwards to the native ``_config`` extension
    module.
    """

    # Ownership flag of the wrapped C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self):
        """Create an empty native configuration object."""
        _config.Config_swiginit(self, _config.new_Config())

    __swig_destroy__ = _config.delete_Config

    def setCaseSensitivityCheck(self, arg2):
        return _config.Config_setCaseSensitivityCheck(self, arg2)

    def readConfig(self, file, stage=-1, raw=False):
        """Read a configuration file, optionally bound to a stage."""
        return _config.Config_readConfig(self, file, stage, raw)

    def writeConfig(self, *args):
        return _config.Config_writeConfig(self, *args)

    def setLogger(self, logger):
        return _config.Config_setLogger(self, logger)

    def symbolsToString(self):
        return _config.Config_symbolsToString(self)

    def names(self):
        return _config.Config_names(self)

    def visitedFilesToString(self):
        return _config.Config_visitedFilesToString(self)

    # -- typed scalar accessors ---------------------------------------------
    def getInt(self, *args):
        return _config.Config_getInt(self, *args)

    def setInt(self, name, value):
        return _config.Config_setInt(self, name, value)

    def getDouble(self, *args):
        return _config.Config_getDouble(self, *args)

    def setDouble(self, name, value):
        return _config.Config_setDouble(self, name, value)

    def getBool(self, *args):
        return _config.Config_getBool(self, *args)

    def setBool(self, name, value):
        return _config.Config_setBool(self, name, value)

    def getString(self, *args):
        return _config.Config_getString(self, *args)

    def setString(self, name, value):
        return _config.Config_setString(self, name, value)

    def remove(self, name):
        return _config.Config_remove(self, name)

    # -- typed list accessors -----------------------------------------------
    def getInts(self, *args):
        return _config.Config_getInts(self, *args)

    def setInts(self, name, values):
        return _config.Config_setInts(self, name, values)

    def getDoubles(self, *args):
        return _config.Config_getDoubles(self, *args)

    def setDoubles(self, name, values):
        return _config.Config_setDoubles(self, name, values)

    def getBools(self, *args):
        return _config.Config_getBools(self, *args)

    def setBools(self, name, values):
        return _config.Config_setBools(self, name, values)

    def getStrings(self, *args):
        return _config.Config_getStrings(self, *args)

    def setStrings(self, name, values):
        return _config.Config_setStrings(self, name, values)

    def symbolTable(self):
        return _config.Config_symbolTable(self)

    # -- expression evaluation and serialization ----------------------------
    def eval(self, rvalue, result, resolveReferences=True, errmsg=None):
        return _config.Config_eval(self, rvalue, result, resolveReferences, errmsg)

    @staticmethod
    def Eval(rvalue, result, resolveReferences=True, symtab=None, errmsg=None):
        return _config.Config_Eval(rvalue, result, resolveReferences, symtab, errmsg)

    @staticmethod
    def writeValues(os, symbol, multilineLists=False):
        return _config.Config_writeValues(os, symbol, multilineLists)

    @staticmethod
    def writeContent(os, symbol, multilineLists=False):
        return _config.Config_writeContent(os, symbol, multilineLists)

    @staticmethod
    def writeSymbol(os, symbol, multilineLists=False):
        return _config.Config_writeSymbol(os, symbol, multilineLists)

    @staticmethod
    def escapeIdentifier(arg1):
        return _config.Config_escapeIdentifier(arg1)

    def trackVariables(self, enabled):
        return _config.Config_trackVariables(self, enabled)

    def getVariables(self):
        return _config.Config_getVariables(self)

    def escape(self, arg2):
        return _config.Config_escape(self, arg2)


# Register Config in _config:
_config.Config_swigregister(Config)
|
||||
|
||||
def Config_Eval(rvalue, result, resolveReferences=True, symtab=None, errmsg=None):
    """Module-level alias of :meth:`Config.Eval` (SWIG static-method shim)."""
    return _config.Config_Eval(rvalue, result, resolveReferences, symtab, errmsg)


def Config_writeValues(os, symbol, multilineLists=False):
    """Module-level alias of :meth:`Config.writeValues` (SWIG static-method shim)."""
    return _config.Config_writeValues(os, symbol, multilineLists)


def Config_writeContent(os, symbol, multilineLists=False):
    """Module-level alias of :meth:`Config.writeContent` (SWIG static-method shim)."""
    return _config.Config_writeContent(os, symbol, multilineLists)


def Config_writeSymbol(os, symbol, multilineLists=False):
    """Module-level alias of :meth:`Config.writeSymbol` (SWIG static-method shim)."""
    return _config.Config_writeSymbol(os, symbol, multilineLists)


def Config_escapeIdentifier(arg1):
    """Module-level alias of :meth:`Config.escapeIdentifier` (SWIG static-method shim)."""
    return _config.Config_escapeIdentifier(arg1)
|
||||
|
||||
class VectorStr(object):
    """SWIG proxy of ``std::vector<std::string>``.

    Behaves like a Python mutable sequence (``len``, indexing, slicing,
    iteration, ``append``/``pop``) and additionally exposes the C++ vector
    API (``push_back``, ``size``, ``reserve``, ...).  Every method forwards
    to the native ``_config`` extension module.
    """

    # Ownership flag of the wrapped C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    # -- Python sequence protocol -------------------------------------------
    def iterator(self):
        return _config.VectorStr_iterator(self)

    def __iter__(self):
        return self.iterator()

    def __nonzero__(self):
        return _config.VectorStr___nonzero__(self)

    def __bool__(self):
        return _config.VectorStr___bool__(self)

    def __len__(self):
        return _config.VectorStr___len__(self)

    def __getslice__(self, i, j):
        return _config.VectorStr___getslice__(self, i, j)

    def __setslice__(self, *args):
        return _config.VectorStr___setslice__(self, *args)

    def __delslice__(self, i, j):
        return _config.VectorStr___delslice__(self, i, j)

    def __delitem__(self, *args):
        return _config.VectorStr___delitem__(self, *args)

    def __getitem__(self, *args):
        return _config.VectorStr___getitem__(self, *args)

    def __setitem__(self, *args):
        return _config.VectorStr___setitem__(self, *args)

    def pop(self):
        return _config.VectorStr_pop(self)

    def append(self, x):
        return _config.VectorStr_append(self, x)

    # -- C++ std::vector API ------------------------------------------------
    def empty(self):
        return _config.VectorStr_empty(self)

    def size(self):
        return _config.VectorStr_size(self)

    def swap(self, v):
        return _config.VectorStr_swap(self, v)

    def begin(self):
        return _config.VectorStr_begin(self)

    def end(self):
        return _config.VectorStr_end(self)

    def rbegin(self):
        return _config.VectorStr_rbegin(self)

    def rend(self):
        return _config.VectorStr_rend(self)

    def clear(self):
        return _config.VectorStr_clear(self)

    def get_allocator(self):
        return _config.VectorStr_get_allocator(self)

    def pop_back(self):
        return _config.VectorStr_pop_back(self)

    def erase(self, *args):
        return _config.VectorStr_erase(self, *args)

    def __init__(self, *args):
        """Construct empty, by copy, by size, or by size and fill value."""
        _config.VectorStr_swiginit(self, _config.new_VectorStr(*args))

    def push_back(self, x):
        return _config.VectorStr_push_back(self, x)

    def front(self):
        return _config.VectorStr_front(self)

    def back(self):
        return _config.VectorStr_back(self)

    def assign(self, n, x):
        return _config.VectorStr_assign(self, n, x)

    def resize(self, *args):
        return _config.VectorStr_resize(self, *args)

    def insert(self, *args):
        return _config.VectorStr_insert(self, *args)

    def reserve(self, n):
        return _config.VectorStr_reserve(self, n)

    def capacity(self):
        return _config.VectorStr_capacity(self)

    __swig_destroy__ = _config.delete_VectorStr


# Register VectorStr in _config:
_config.VectorStr_swigregister(VectorStr)
|
||||
|
||||
class VectorInt(object):
    """SWIG proxy of ``std::vector<int>``.

    Behaves like a Python mutable sequence (``len``, indexing, slicing,
    iteration, ``append``/``pop``) and additionally exposes the C++ vector
    API (``push_back``, ``size``, ``reserve``, ...).  Every method forwards
    to the native ``_config`` extension module.
    """

    # Ownership flag of the wrapped C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    # -- Python sequence protocol -------------------------------------------
    def iterator(self):
        return _config.VectorInt_iterator(self)

    def __iter__(self):
        return self.iterator()

    def __nonzero__(self):
        return _config.VectorInt___nonzero__(self)

    def __bool__(self):
        return _config.VectorInt___bool__(self)

    def __len__(self):
        return _config.VectorInt___len__(self)

    def __getslice__(self, i, j):
        return _config.VectorInt___getslice__(self, i, j)

    def __setslice__(self, *args):
        return _config.VectorInt___setslice__(self, *args)

    def __delslice__(self, i, j):
        return _config.VectorInt___delslice__(self, i, j)

    def __delitem__(self, *args):
        return _config.VectorInt___delitem__(self, *args)

    def __getitem__(self, *args):
        return _config.VectorInt___getitem__(self, *args)

    def __setitem__(self, *args):
        return _config.VectorInt___setitem__(self, *args)

    def pop(self):
        return _config.VectorInt_pop(self)

    def append(self, x):
        return _config.VectorInt_append(self, x)

    # -- C++ std::vector API ------------------------------------------------
    def empty(self):
        return _config.VectorInt_empty(self)

    def size(self):
        return _config.VectorInt_size(self)

    def swap(self, v):
        return _config.VectorInt_swap(self, v)

    def begin(self):
        return _config.VectorInt_begin(self)

    def end(self):
        return _config.VectorInt_end(self)

    def rbegin(self):
        return _config.VectorInt_rbegin(self)

    def rend(self):
        return _config.VectorInt_rend(self)

    def clear(self):
        return _config.VectorInt_clear(self)

    def get_allocator(self):
        return _config.VectorInt_get_allocator(self)

    def pop_back(self):
        return _config.VectorInt_pop_back(self)

    def erase(self, *args):
        return _config.VectorInt_erase(self, *args)

    def __init__(self, *args):
        """Construct empty, by copy, by size, or by size and fill value."""
        _config.VectorInt_swiginit(self, _config.new_VectorInt(*args))

    def push_back(self, x):
        return _config.VectorInt_push_back(self, x)

    def front(self):
        return _config.VectorInt_front(self)

    def back(self):
        return _config.VectorInt_back(self)

    def assign(self, n, x):
        return _config.VectorInt_assign(self, n, x)

    def resize(self, *args):
        return _config.VectorInt_resize(self, *args)

    def insert(self, *args):
        return _config.VectorInt_insert(self, *args)

    def reserve(self, n):
        return _config.VectorInt_reserve(self, n)

    def capacity(self):
        return _config.VectorInt_capacity(self)

    __swig_destroy__ = _config.delete_VectorInt


# Register VectorInt in _config:
_config.VectorInt_swigregister(VectorInt)
|
||||
|
||||
class VectorDouble(object):
    """SWIG proxy of ``std::vector<double>``.

    Behaves like a Python mutable sequence (``len``, indexing, slicing,
    iteration, ``append``/``pop``) and additionally exposes the C++ vector
    API (``push_back``, ``size``, ``reserve``, ...).  Every method forwards
    to the native ``_config`` extension module.
    """

    # Ownership flag of the wrapped C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    # -- Python sequence protocol -------------------------------------------
    def iterator(self):
        return _config.VectorDouble_iterator(self)

    def __iter__(self):
        return self.iterator()

    def __nonzero__(self):
        return _config.VectorDouble___nonzero__(self)

    def __bool__(self):
        return _config.VectorDouble___bool__(self)

    def __len__(self):
        return _config.VectorDouble___len__(self)

    def __getslice__(self, i, j):
        return _config.VectorDouble___getslice__(self, i, j)

    def __setslice__(self, *args):
        return _config.VectorDouble___setslice__(self, *args)

    def __delslice__(self, i, j):
        return _config.VectorDouble___delslice__(self, i, j)

    def __delitem__(self, *args):
        return _config.VectorDouble___delitem__(self, *args)

    def __getitem__(self, *args):
        return _config.VectorDouble___getitem__(self, *args)

    def __setitem__(self, *args):
        return _config.VectorDouble___setitem__(self, *args)

    def pop(self):
        return _config.VectorDouble_pop(self)

    def append(self, x):
        return _config.VectorDouble_append(self, x)

    # -- C++ std::vector API ------------------------------------------------
    def empty(self):
        return _config.VectorDouble_empty(self)

    def size(self):
        return _config.VectorDouble_size(self)

    def swap(self, v):
        return _config.VectorDouble_swap(self, v)

    def begin(self):
        return _config.VectorDouble_begin(self)

    def end(self):
        return _config.VectorDouble_end(self)

    def rbegin(self):
        return _config.VectorDouble_rbegin(self)

    def rend(self):
        return _config.VectorDouble_rend(self)

    def clear(self):
        return _config.VectorDouble_clear(self)

    def get_allocator(self):
        return _config.VectorDouble_get_allocator(self)

    def pop_back(self):
        return _config.VectorDouble_pop_back(self)

    def erase(self, *args):
        return _config.VectorDouble_erase(self, *args)

    def __init__(self, *args):
        """Construct empty, by copy, by size, or by size and fill value."""
        _config.VectorDouble_swiginit(self, _config.new_VectorDouble(*args))

    def push_back(self, x):
        return _config.VectorDouble_push_back(self, x)

    def front(self):
        return _config.VectorDouble_front(self)

    def back(self):
        return _config.VectorDouble_back(self)

    def assign(self, n, x):
        return _config.VectorDouble_assign(self, n, x)

    def resize(self, *args):
        return _config.VectorDouble_resize(self, *args)

    def insert(self, *args):
        return _config.VectorDouble_insert(self, *args)

    def reserve(self, n):
        return _config.VectorDouble_reserve(self, n)

    def capacity(self):
        return _config.VectorDouble_capacity(self)

    __swig_destroy__ = _config.delete_VectorDouble


# Register VectorDouble in _config:
_config.VectorDouble_swigregister(VectorDouble)
|
||||
|
||||
class VectorBool(object):
    """SWIG proxy of ``std::vector<bool>``.

    Behaves like a Python mutable sequence (``len``, indexing, slicing,
    iteration, ``append``/``pop``) and additionally exposes the C++ vector
    API (``push_back``, ``size``, ``reserve``, ...).  Every method forwards
    to the native ``_config`` extension module.
    """

    # Ownership flag of the wrapped C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    # -- Python sequence protocol -------------------------------------------
    def iterator(self):
        return _config.VectorBool_iterator(self)

    def __iter__(self):
        return self.iterator()

    def __nonzero__(self):
        return _config.VectorBool___nonzero__(self)

    def __bool__(self):
        return _config.VectorBool___bool__(self)

    def __len__(self):
        return _config.VectorBool___len__(self)

    def __getslice__(self, i, j):
        return _config.VectorBool___getslice__(self, i, j)

    def __setslice__(self, *args):
        return _config.VectorBool___setslice__(self, *args)

    def __delslice__(self, i, j):
        return _config.VectorBool___delslice__(self, i, j)

    def __delitem__(self, *args):
        return _config.VectorBool___delitem__(self, *args)

    def __getitem__(self, *args):
        return _config.VectorBool___getitem__(self, *args)

    def __setitem__(self, *args):
        return _config.VectorBool___setitem__(self, *args)

    def pop(self):
        return _config.VectorBool_pop(self)

    def append(self, x):
        return _config.VectorBool_append(self, x)

    # -- C++ std::vector API ------------------------------------------------
    def empty(self):
        return _config.VectorBool_empty(self)

    def size(self):
        return _config.VectorBool_size(self)

    def swap(self, v):
        return _config.VectorBool_swap(self, v)

    def begin(self):
        return _config.VectorBool_begin(self)

    def end(self):
        return _config.VectorBool_end(self)

    def rbegin(self):
        return _config.VectorBool_rbegin(self)

    def rend(self):
        return _config.VectorBool_rend(self)

    def clear(self):
        return _config.VectorBool_clear(self)

    def get_allocator(self):
        return _config.VectorBool_get_allocator(self)

    def pop_back(self):
        return _config.VectorBool_pop_back(self)

    def erase(self, *args):
        return _config.VectorBool_erase(self, *args)

    def __init__(self, *args):
        """Construct empty, by copy, by size, or by size and fill value."""
        _config.VectorBool_swiginit(self, _config.new_VectorBool(*args))

    def push_back(self, x):
        return _config.VectorBool_push_back(self, x)

    def front(self):
        return _config.VectorBool_front(self)

    def back(self):
        return _config.VectorBool_back(self)

    def assign(self, n, x):
        return _config.VectorBool_assign(self, n, x)

    def resize(self, *args):
        return _config.VectorBool_resize(self, *args)

    def insert(self, *args):
        return _config.VectorBool_insert(self, *args)

    def reserve(self, n):
        return _config.VectorBool_reserve(self, n)

    def capacity(self):
        return _config.VectorBool_capacity(self)

    __swig_destroy__ = _config.delete_VectorBool


# Register VectorBool in _config:
_config.VectorBool_swigregister(VectorBool)
|
||||
|
||||
|
||||
|
2769
lib/python/seiscomp/core.py
Normal file
2769
lib/python/seiscomp/core.py
Normal file
File diff suppressed because it is too large
Load Diff
23447
lib/python/seiscomp/datamodel/__init__.py
Normal file
23447
lib/python/seiscomp/datamodel/__init__.py
Normal file
File diff suppressed because it is too large
Load Diff
BIN
lib/python/seiscomp/datamodel/_datamodel.so
Normal file
BIN
lib/python/seiscomp/datamodel/_datamodel.so
Normal file
Binary file not shown.
0
lib/python/seiscomp/fdsnws/__init__.py
Normal file
0
lib/python/seiscomp/fdsnws/__init__.py
Normal file
85
lib/python/seiscomp/fdsnws/authresource.py
Normal file
85
lib/python/seiscomp/fdsnws/authresource.py
Normal file
@ -0,0 +1,85 @@
|
||||
################################################################################
|
||||
# Copyright (C) 2013-2014 by gempa GmbH
|
||||
#
|
||||
# HTTP -- Utility methods which generate HTTP result strings
|
||||
#
|
||||
# Author: Stephan Herrnkind
|
||||
# Email: herrnkind@gempa.de
|
||||
################################################################################
|
||||
|
||||
import base64
|
||||
import datetime
|
||||
import hashlib
|
||||
import json
|
||||
import time
|
||||
import dateutil.parser
|
||||
|
||||
from twisted.web import http
|
||||
|
||||
import gnupg
|
||||
|
||||
import seiscomp.logging
|
||||
|
||||
from .utils import accessLog, u_str
|
||||
|
||||
from .http import BaseResource
|
||||
|
||||
|
||||
################################################################################
|
||||
################################################################################
class AuthResource(BaseResource):
    """Twisted resource issuing temporary credentials for GPG-signed tokens.

    A POST request carries a GPG-signed JSON token.  The signature is
    verified against the configured keyring, the token's validity window is
    checked, and on success a deterministic user id plus a database-generated
    password are registered in the user database and returned to the client
    as ``userid:password``.
    """

    isLeaf = True

    def __init__(self, version, gnupghome, userdb):
        super().__init__(version)

        self.__gpg = gnupg.GPG(gnupghome=gnupghome)
        self.__userdb = userdb

    # ---------------------------------------------------------------------------
    def render_POST(self, request):
        """Verify the posted token and respond with ``userid:password``."""
        request.setHeader("Content-Type", "text/plain; charset=utf-8")

        try:
            verified = self.__gpg.decrypt(request.content.getvalue())
        except OSError as e:
            msg = "gpg decrypt error"
            seiscomp.logging.warning(f"{msg}: {e}")
            return self.renderErrorPage(request, http.INTERNAL_SERVER_ERROR, msg)
        except Exception as e:
            msg = "invalid token"
            seiscomp.logging.warning(f"{msg}: {e}")
            return self.renderErrorPage(request, http.BAD_REQUEST, msg)

        # Only accept fully trusted signatures.
        if verified.trust_level is None or verified.trust_level < verified.TRUST_FULLY:
            msg = "token has invalid signature"
            seiscomp.logging.warning(msg)
            return self.renderErrorPage(request, http.BAD_REQUEST, msg)

        try:
            attributes = json.loads(u_str(verified.data))
            # Remaining lifetime in seconds relative to now (UTC).
            td = dateutil.parser.parse(
                attributes["valid_until"]
            ) - datetime.datetime.now(dateutil.tz.tzutc())
            lifetime = td.days * 24 * 3600 + td.seconds
        except Exception as e:
            msg = "token has invalid validity"
            seiscomp.logging.warning(f"{msg}: {e}")
            return self.renderErrorPage(request, http.BAD_REQUEST, msg)

        if lifetime <= 0:
            msg = "token is expired"
            seiscomp.logging.warning(msg)
            return self.renderErrorPage(request, http.BAD_REQUEST, msg)

        # Deterministic, URL-safe id derived from the token content
        # (first 18 digest bytes -> 24 base64 characters).
        userid = base64.urlsafe_b64encode(hashlib.sha256(verified.data).digest()[:18])
        password = self.__userdb.addUser(
            u_str(userid),
            attributes,
            time.time() + min(lifetime, 24 * 3600),  # cap credentials at 24h
            u_str(verified.data),
        )
        accessLog(request, None, http.OK, len(userid) + len(password) + 1, None)
        return userid + b":" + password
|
1442
lib/python/seiscomp/fdsnws/availability.py
Normal file
1442
lib/python/seiscomp/fdsnws/availability.py
Normal file
File diff suppressed because it is too large
Load Diff
796
lib/python/seiscomp/fdsnws/dataselect.py
Normal file
796
lib/python/seiscomp/fdsnws/dataselect.py
Normal file
@ -0,0 +1,796 @@
|
||||
################################################################################
|
||||
# Copyright (C) 2013-2014 by gempa GmbH
|
||||
#
|
||||
# FDSNDataSelect -- Implements the fdsnws-dataselect Web service, see
|
||||
# http://www.fdsn.org/webservices/
|
||||
#
|
||||
# Feature notes:
|
||||
# - 'quality' request parameter not implemented (information not available in
|
||||
# SeisComP)
|
||||
# - 'minimumlength' parameter is not implemented
|
||||
# - 'longestonly' parameter is not implemented
|
||||
#
|
||||
# Author: Stephan Herrnkind
|
||||
# Email: herrnkind@gempa.de
|
||||
################################################################################
|
||||
|
||||
import time
|
||||
|
||||
from io import BytesIO
|
||||
|
||||
import dateutil.parser
|
||||
|
||||
from twisted.cred import portal
|
||||
from twisted.web import http, resource, server
|
||||
from twisted.internet import interfaces, reactor
|
||||
|
||||
from zope.interface import implementer
|
||||
|
||||
from seiscomp import logging, mseedlite
|
||||
|
||||
from seiscomp.client import Application
|
||||
from seiscomp.core import Array, Record, Time
|
||||
from seiscomp.io import RecordInput, RecordStream
|
||||
|
||||
from .http import HTTP, BaseResource
|
||||
from .request import RequestOptions
|
||||
from . import utils
|
||||
|
||||
from .reqtrack import RequestTrackerDB
|
||||
from .fastsds import SDS
|
||||
|
||||
VERSION = "1.1.3"
|
||||
|
||||
################################################################################
|
||||
|
||||
|
||||
class _DataSelectRequestOptions(RequestOptions):
    """Request options specific to the fdsnws-dataselect service.

    Adds the ``quality``, ``minimumlength`` and ``longestonly`` parameters
    (all accepted but currently unsupported by the backend) on top of the
    generic time/channel/output options.
    """

    MinTime = Time(0, 1)  # lower bound for requested windows (epoch start)

    PQuality = ["quality"]
    PMinimumLength = ["minimumlength"]
    PLongestOnly = ["longestonly"]

    QualityValues = ["B", "D", "M", "Q", "R"]
    OutputFormats = ["miniseed", "mseed"]

    POSTParams = RequestOptions.POSTParams + PQuality + PMinimumLength + PLongestOnly
    GETParams = RequestOptions.GETParams + POSTParams

    # ---------------------------------------------------------------------------
    def __init__(self):
        super().__init__()

        self.service = "fdsnws-dataselect"

        self.quality = self.QualityValues[0]
        self.minimumLength = None
        self.longestOnly = None

    # ---------------------------------------------------------------------------
    def _checkTimes(self, realtimeGap):
        """Clamp all stream windows to [1970, now - realtimeGap] and drop empty ones."""
        maxEndTime = Time(self.accessTime)
        if realtimeGap is not None:
            maxEndTime -= Time(realtimeGap, 0)

        for stream in self.streams:
            # create a time window if none was specified
            if stream.time is None:
                stream.time = RequestOptions.Time()
            # restrict time to 1970 - now
            if stream.time.start is None or stream.time.start < self.MinTime:
                stream.time.start = self.MinTime
            if stream.time.end is None or stream.time.end > maxEndTime:
                stream.time.end = maxEndTime

        # remove items with start time >= end time
        self.streams = [s for s in self.streams if s.time.start < s.time.end]

    # ---------------------------------------------------------------------------
    def parse(self):
        """Parse dataselect-specific and generic request parameters."""
        # quality (optional), currently not supported
        key, value = self.getFirstValue(self.PQuality)
        if value is not None:
            value = value.upper()
            if value not in self.QualityValues:
                self.raiseValueError(key)
            self.quality = value

        # minimumlength (optional), currently not supported
        self.minimumLength = self.parseFloat(self.PMinimumLength, 0)

        # longestonly (optional), currently not supported
        self.longestOnly = self.parseBool(self.PLongestOnly)

        # generic parameters
        self.parseTime()
        self.parseChannel()
        self.parseOutput()
|
||||
|
||||
|
||||
################################################################################
|
||||
################################################################################
class _MyRecordStream:
    """Lazily streams raw miniSEED data for a list of queued time windows.

    Windows are collected via :meth:`addStream`; :meth:`input` then yields
    chunks of record data, using the fast SDS reader for ``fastsds://`` URLs
    and a generic SeisComP record stream otherwise.  Each request tracker is
    notified about the per-window transfer status.
    """

    def __init__(self, url, trackerList, bufferSize):
        self.__url = url
        self.__trackerList = trackerList
        self.__bufferSize = bufferSize
        self.__tw = []  # queued (net, sta, loc, cha, start, end, restricted, archNet) tuples

    def addStream(self, net, sta, loc, cha, startt, endt, restricted, archNet):
        """Queue one stream time window for later retrieval."""
        self.__tw.append((net, sta, loc, cha, startt, endt, restricted, archNet))

    @staticmethod
    def __override_network(data, net):
        """Rewrite the network code of every record in *data* to *net*."""
        inp = BytesIO(data)
        out = BytesIO()

        for rec in mseedlite.Input(inp):
            rec.net = net
            # find the smallest power-of-two record length (min 512 bytes)
            rec_len_exp = 9
            while (1 << rec_len_exp) < rec.size:
                rec_len_exp += 1

            rec.write(out, rec_len_exp)

        return out.getvalue()

    def input(self):
        """Generator yielding raw miniSEED chunks for all queued windows."""
        fastsdsPrefix = "fastsds://"

        if self.__url.startswith(fastsdsPrefix):
            fastsds = SDS(self.__url[len(fastsdsPrefix) :])
        else:
            fastsds = None

        for net, sta, loc, cha, startt, endt, restricted, archNet in self.__tw:
            if not archNet:
                archNet = net

            size = 0  # bytes delivered for this window

            if fastsds:
                start = dateutil.parser.parse(startt.iso()).replace(tzinfo=None)
                end = dateutil.parser.parse(endt.iso()).replace(tzinfo=None)

                for data in fastsds.getRawBytes(
                    start, end, archNet, sta, loc, cha, self.__bufferSize
                ):
                    size += len(data)

                    if archNet == net:
                        yield data
                    else:
                        try:
                            yield self.__override_network(data, net)
                        except Exception as e:
                            logging.error(f"could not override network code: {e}")
            else:
                rs = RecordStream.Open(self.__url)

                if rs is None:
                    logging.error("could not open record stream")
                    break

                rs.addStream(archNet, sta, loc, cha, startt, endt)
                rsInput = RecordInput(rs, Array.INT, Record.SAVE_RAW)
                eof = False

                while not eof:
                    data = b""

                    # fill one buffer-sized chunk from the record stream
                    while len(data) < self.__bufferSize:
                        try:
                            rec = rsInput.next()
                        except Exception as e:
                            logging.error(str(e))
                            eof = True
                            break

                        if rec is None:
                            eof = True
                            break

                        data += rec.raw().str()

                    if data:
                        size += len(data)

                        if archNet == net:
                            yield data
                        else:
                            try:
                                yield self.__override_network(data, net)
                            except Exception as e:
                                logging.error(f"could not override network code: {e}")

            # report the per-window result to all trackers
            for tracker in self.__trackerList:
                net_class = "t" if net[0] in "0123456789XYZ" else "p"
                status, volume = ("NODATA", 0) if size == 0 else ("OK", size)
                tracker.line_status(
                    startt,
                    endt,
                    net,
                    sta,
                    cha,
                    loc,
                    restricted,
                    net_class,
                    True,
                    [],
                    "fdsnws",
                    status,
                    volume,
                    "",
                )
|
||||
|
||||
|
||||
################################################################################
|
||||
################################################################################
@implementer(interfaces.IPushProducer)
class _WaveformProducer:
    """Push producer feeding miniSEED chunks from a record stream to a request.

    Chunks are fetched in a reactor worker thread (:meth:`_collectData`) and
    written back on the reactor thread; Twisted pauses/resumes the producer
    to apply back pressure.
    """

    def __init__(self, req, ro, rs, fileName, trackerList):
        self.req = req
        self.ro = ro
        self.it = rs.input()  # generator of raw data chunks

        self.fileName = fileName
        self.written = 0  # bytes delivered so far

        self.trackerList = trackerList
        self.paused = False
        self.stopped = False
        self.running = False

    def _flush(self, data):
        """Write one chunk to the request (runs on the reactor thread)."""
        if self.stopped:
            return

        if self.paused:
            self.running = False
        else:
            reactor.callInThread(self._collectData)

        if self.written == 0:
            # first chunk: emit the download headers
            self.req.setHeader("Content-Type", "application/vnd.fdsn.mseed")
            self.req.setHeader(
                "Content-Disposition", f"attachment; filename={self.fileName}"
            )

        self.req.write(data)
        self.written += len(data)

    def _finish(self):
        """Finalize the response once the input generator is exhausted."""
        if self.stopped:
            return

        if self.written == 0:
            msg = "no waveform data found"
            errorpage = HTTP.renderErrorPage(
                self.req, http.NO_CONTENT, msg, VERSION, self.ro
            )

            if errorpage:
                self.req.write(errorpage)

            for tracker in self.trackerList:
                tracker.volume_status("fdsnws", "NODATA", 0, "")
                tracker.request_status("END", "")
        else:
            logging.debug(
                f"{self.ro.service}: returned {self.written} bytes of mseed data"
            )
            utils.accessLog(self.req, self.ro, http.OK, self.written, None)

            for tracker in self.trackerList:
                tracker.volume_status("fdsnws", "OK", self.written, "")
                tracker.request_status("END", "")

        self.req.unregisterProducer()
        self.req.finish()

    def _collectData(self):
        """Worker thread: fetch the next chunk and hand it to the reactor."""
        try:
            reactor.callFromThread(self._flush, next(self.it))
        except StopIteration:
            reactor.callFromThread(self._finish)

    def pauseProducing(self):
        self.paused = True

    def resumeProducing(self):
        self.paused = False

        if not self.running:
            self.running = True
            reactor.callInThread(self._collectData)

    def stopProducing(self):
        """Abort the transfer (client disconnected)."""
        self.stopped = True

        logging.debug(
            f"{self.ro.service}: returned {self.written} bytes of mseed data (not "
            "completed)"
        )
        utils.accessLog(self.req, self.ro, http.OK, self.written, "not completed")

        for tracker in self.trackerList:
            tracker.volume_status("fdsnws", "ERROR", self.written, "")
            tracker.request_status("END", "")

        self.req.unregisterProducer()
        self.req.finish()
|
||||
|
||||
|
||||
################################################################################
|
||||
################################################################################
@implementer(portal.IRealm)
class FDSNDataSelectRealm:
    """Realm producing an :class:`FDSNDataSelect` resource for an
    authenticated (email-identified) avatar."""

    # ---------------------------------------------------------------------------
    def __init__(self, inv, bufferSize, access):
        self.__inv = inv
        self.__bufferSize = bufferSize
        self.__access = access

    # ---------------------------------------------------------------------------
    def requestAvatar(self, avatarId, _mind, *interfaces_):
        if resource.IResource not in interfaces_:
            raise NotImplementedError()

        return (
            resource.IResource,
            FDSNDataSelect(
                self.__inv,
                self.__bufferSize,
                self.__access,
                {"mail": utils.u_str(avatarId), "blacklisted": False},
            ),
            lambda: None,  # no-op logout callback
        )
|
||||
|
||||
|
||||
################################################################################
|
||||
@implementer(portal.IRealm)
class FDSNDataSelectAuthRealm:
    """Twisted cred realm producing FDSNDataSelect avatars whose user
    attributes are looked up in a user database."""

    # ---------------------------------------------------------------------------
    def __init__(self, inv, bufferSize, access, userdb):
        self.__inv = inv
        self.__bufferSize = bufferSize
        self.__access = access
        self.__userdb = userdb

    # ---------------------------------------------------------------------------
    def requestAvatar(self, avatarId, _mind, *interfaces_):
        """Return an (interface, avatar, logout) triple for web requests."""
        if resource.IResource not in interfaces_:
            raise NotImplementedError()

        avatar = FDSNDataSelect(
            self.__inv,
            self.__bufferSize,
            self.__access,
            self.__userdb.getAttributes(utils.u_str(avatarId)),
        )
        # no teardown required on logout, hence the no-op callable
        return (resource.IResource, avatar, lambda: None)
||||
|
||||
|
||||
################################################################################
|
||||
class FDSNDataSelect(BaseResource):
    """FDSNWS dataselect resource: serves miniSEED waveform data.

    Filters the request against the inventory, enforces access
    restrictions and sample limits, and streams matching records
    asynchronously via a _WaveformProducer.
    """

    isLeaf = True

    # ---------------------------------------------------------------------------
    def __init__(self, inv, bufferSize, access=None, user=None):
        super().__init__(VERSION)

        self._rsURL = Application.Instance().recordStreamURL()
        self.__inv = inv
        self.__access = access
        self.__user = user
        self.__bufferSize = bufferSize

    # ---------------------------------------------------------------------------
    def render_OPTIONS(self, req):
        """Answer CORS pre-flight requests."""
        req.setHeader("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
        req.setHeader(
            "Access-Control-Allow-Headers",
            "Accept, Content-Type, X-Requested-With, Origin",
        )
        req.setHeader("Content-Type", "text/plain; charset=utf-8")
        return ""

    # ---------------------------------------------------------------------------
    def render_GET(self, req):
        """Parse and validate GET parameters, then process the request."""
        ro = _DataSelectRequestOptions()
        ro.userName = self.__user and self.__user.get("mail")
        try:
            ro.parseGET(req.args)
            ro.parse()
            # the GET operation supports exactly one stream filter
            ro.streams.append(ro)
        except ValueError as e:
            logging.warning(str(e))
            return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

        return self._processRequest(req, ro)

    # ---------------------------------------------------------------------------
    def render_POST(self, req):
        """Parse and validate POST parameters, then process the request."""
        ro = _DataSelectRequestOptions()
        ro.userName = self.__user and self.__user.get("mail")
        try:
            ro.parsePOST(req.content)
            ro.parse()
        except ValueError as e:
            logging.warning(str(e))
            return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

        return self._processRequest(req, ro)

    # -----------------------------------------------------------------------
    def _networkIter(self, ro):
        """Yield inventory networks matching the request's channel/time filter."""
        for i in range(self.__inv.networkCount()):
            net = self.__inv.network(i)

            # network code
            if ro.channel and not ro.channel.matchNet(net.code()):
                continue

            # start and end time; an open end epoch raises ValueError
            if ro.time:
                try:
                    end = net.end()
                except ValueError:
                    end = None
                if not ro.time.match(net.start(), end):
                    continue

            yield net

    # ---------------------------------------------------------------------------
    @staticmethod
    def _stationIter(net, ro):
        """Yield stations of *net* matching the request's channel/time filter."""
        for i in range(net.stationCount()):
            sta = net.station(i)

            # station code
            if ro.channel and not ro.channel.matchSta(sta.code()):
                continue

            # start and end time; an open end epoch raises ValueError
            if ro.time:
                try:
                    end = sta.end()
                except ValueError:
                    end = None
                if not ro.time.match(sta.start(), end):
                    continue

            yield sta

    # ---------------------------------------------------------------------------
    @staticmethod
    def _locationIter(sta, ro):
        """Yield sensor locations of *sta* matching the request filter."""
        for i in range(sta.sensorLocationCount()):
            loc = sta.sensorLocation(i)

            # location code
            if ro.channel and not ro.channel.matchLoc(loc.code()):
                continue

            # start and end time; an open end epoch raises ValueError
            if ro.time:
                try:
                    end = loc.end()
                except ValueError:
                    end = None
                if not ro.time.match(loc.start(), end):
                    continue

            yield loc

    # ---------------------------------------------------------------------------
    @staticmethod
    def _streamIter(loc, ro):
        """Yield (stream, isAux) pairs of *loc* matching the request filter.

        Regular streams are yielded first (isAux=False), deprecated
        auxiliary streams afterwards (isAux=True).
        """
        for i in range(loc.streamCount()):
            stream = loc.stream(i)

            # stream code
            if ro.channel and not ro.channel.matchCha(stream.code()):
                continue

            # start and end time; an open end epoch raises ValueError
            if ro.time:
                try:
                    end = stream.end()
                except ValueError:
                    end = None
                if not ro.time.match(stream.start(), end):
                    continue

            yield stream, False

        for i in range(loc.auxStreamCount()):
            stream = loc.auxStream(i)

            # stream code
            if ro.channel and not ro.channel.matchCha(stream.code()):
                continue

            # start and end time; an open end epoch raises ValueError
            if ro.time:
                try:
                    end = stream.end()
                except ValueError:
                    end = None
                if not ro.time.match(stream.start(), end):
                    continue

            yield stream, True

    # ---------------------------------------------------------------------------
    def _processRequest(self, req, ro):
        """Validate the parsed request, build the record stream and start
        the asynchronous waveform producer.

        Returns an error page (bytes) on rejection or
        ``server.NOT_DONE_YET`` when streaming has been scheduled.
        """
        # pylint: disable=W0212

        if ro.quality not in ("B", "M"):
            msg = "quality other than 'B' or 'M' not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.minimumLength:
            msg = "enforcing of minimum record length not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.longestOnly:
            msg = "limitation to longest segment not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        app = Application.Instance()
        ro._checkTimes(app._realtimeGap)

        # sample budget: _samplesM is configured in millions of samples
        maxSamples = None
        if app._samplesM is not None:
            maxSamples = app._samplesM * 1000000
        samples = 0

        trackerList = []
        userIP = ""

        if app._trackdbEnabled or app._requestLog:
            # prefer the client address reported by a reverse proxy
            xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
            if xff:
                userIP = xff[0].split(",")[0].strip()
            else:
                userIP = req.getClientIP()

            clientID = req.getHeader("User-Agent")
            if clientID:
                clientID = clientID[:80]
            else:
                clientID = "fdsnws"

        if app._trackdbEnabled:
            if ro.userName:
                userID = ro.userName
            else:
                userID = app._trackdbDefaultUser

            # milliseconds since 2015-01-01 as a request identifier
            reqID = f"ws{int(round(time.time() * 1000) - 1420070400000)}"
            trackerList.append(
                RequestTrackerDB(
                    clientID,
                    app.connection(),
                    reqID,
                    "WAVEFORM",
                    userID,
                    f"REQUEST WAVEFORM {reqID}",
                    "fdsnws",
                    userIP,
                    req.getClientIP(),
                )
            )

        if app._requestLog:
            trackerList.append(
                app._requestLog.tracker(ro.service, ro.userName, userIP, clientID)
            )

        # Open record stream
        rs = _MyRecordStream(self._rsURL, trackerList, self.__bufferSize)

        # forbidden is tri-state: None = nothing matched yet,
        # True = only restricted streams matched, False = data was added
        forbidden = None
        auxStreamsFound = False

        # Add request streams: iterate over inventory networks
        for s in ro.streams:
            for net in self._networkIter(s):
                netRestricted = utils.isRestricted(net)
                if not trackerList and netRestricted and not self.__user:
                    forbidden = forbidden or (forbidden is None)
                    continue

                for sta in self._stationIter(net, s):
                    staRestricted = utils.isRestricted(sta)
                    if not trackerList and staRestricted and not self.__user:
                        forbidden = forbidden or (forbidden is None)
                        continue

                    for loc in self._locationIter(sta, s):
                        for cha, aux in self._streamIter(loc, s):
                            # clip the requested window to the epoch
                            start_time = max(cha.start(), s.time.start)
                            try:
                                end_time = min(cha.end(), s.time.end)
                            except ValueError:
                                end_time = s.time.end

                            streamRestricted = (
                                netRestricted
                                or staRestricted
                                or utils.isRestricted(cha)
                            )
                            if streamRestricted and (
                                not self.__user
                                or (
                                    self.__access
                                    and not self.__access.authorize(
                                        self.__user,
                                        net.code(),
                                        sta.code(),
                                        loc.code(),
                                        cha.code(),
                                        start_time,
                                        end_time,
                                    )
                                )
                            ):
                                for tracker in trackerList:
                                    net_class = (
                                        "t" if net.code()[0] in "0123456789XYZ" else "p"
                                    )
                                    tracker.line_status(
                                        start_time,
                                        end_time,
                                        net.code(),
                                        sta.code(),
                                        cha.code(),
                                        loc.code(),
                                        True,
                                        net_class,
                                        True,
                                        [],
                                        "fdsnws",
                                        "DENIED",
                                        0,
                                        "",
                                    )
                                forbidden = forbidden or (forbidden is None)
                                continue

                            forbidden = False

                            # aux streams are deprecated, mark aux streams as
                            # present to report warning later on, also do not
                            # count aux stream samples due to their loose
                            # binding to a aux device and source which only
                            # optionally contains a sampling rate
                            if aux:
                                auxStreamsFound = True
                            # enforce maximum sample per request restriction
                            elif maxSamples is not None:
                                try:
                                    n = cha.sampleRateNumerator()
                                    d = cha.sampleRateDenominator()
                                except ValueError:
                                    logging.warning(
                                        "skipping stream without sampling rate "
                                        f"definition: {net.code()}.{sta.code()}."
                                        f"{loc.code()}.{cha.code()}"
                                    )
                                    continue

                                # calculate number of samples for requested
                                # time window
                                diffSec = (end_time - start_time).length()
                                samples += int(diffSec * n / d)
                                if samples > maxSamples:
                                    msg = (
                                        f"maximum number of {app._samplesM}M samples "
                                        "exceeded"
                                    )
                                    return self.renderErrorPage(
                                        req, http.REQUEST_ENTITY_TOO_LARGE, msg, ro
                                    )

                            logging.debug(
                                f"adding stream: {net.code()}.{sta.code()}.{loc.code()}"
                                f".{cha.code()} {start_time.iso()} - {end_time.iso()}"
                            )
                            rs.addStream(
                                net.code(),
                                sta.code(),
                                loc.code(),
                                cha.code(),
                                start_time,
                                end_time,
                                utils.isRestricted(cha),
                                sta.archiveNetworkCode(),
                            )

        if forbidden:
            for tracker in trackerList:
                tracker.volume_status("fdsnws", "DENIED", 0, "")
                tracker.request_status("END", "")

            msg = "access denied"
            return self.renderErrorPage(req, http.FORBIDDEN, msg, ro)

        if forbidden is None:
            for tracker in trackerList:
                tracker.volume_status("fdsnws", "NODATA", 0, "")
                tracker.request_status("END", "")

            msg = "no metadata found"
            return self.renderErrorPage(req, http.NO_CONTENT, msg, ro)

        if auxStreamsFound:
            msg = (
                "the request contains at least one auxiliary stream which are "
                "deprecated"
            )
            if maxSamples is not None:
                msg += (
                    " and whose samples are not included in the maximum sample per "
                    "request limit"
                )
            logging.info(msg)

        # Build output filename
        fileName = (
            Application.Instance()._fileNamePrefix.replace(
                "%time", time.strftime("%Y-%m-%dT%H:%M:%S")
            )
            + ".mseed"
        )

        # Create producer for async IO
        prod = _WaveformProducer(req, ro, rs, fileName, trackerList)
        req.registerProducer(prod, True)
        prod.resumeProducing()

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
|
||||
|
||||
|
||||
# vim: ts=4 et
|
1020
lib/python/seiscomp/fdsnws/event.py
Normal file
1020
lib/python/seiscomp/fdsnws/event.py
Normal file
File diff suppressed because it is too large
Load Diff
216
lib/python/seiscomp/fdsnws/fastsds.py
Normal file
216
lib/python/seiscomp/fdsnws/fastsds.py
Normal file
@ -0,0 +1,216 @@
|
||||
################################################################################
|
||||
# Copyright (C) 2014-2017 by GFZ Potsdam
|
||||
#
|
||||
# Classes to access an SDS structure to be used by the Dataselect-WS
|
||||
#
|
||||
# Author: Javier Quinteros
|
||||
# Email: javier@gfz-potsdam.de
|
||||
################################################################################
|
||||
|
||||
import datetime
|
||||
import os
|
||||
|
||||
import seiscomp.logging
|
||||
import seiscomp.mseedlite
|
||||
|
||||
|
||||
class SDS:
    """Read-only access to a miniSEED SDS archive for the dataselect service.

    Records are located by a binary search over fixed-size records in the
    daily SDS files and streamed out in chunks of roughly *bufferSize* bytes.
    """

    def __init__(self, sdsRoot):
        # accept a single root directory or a list of roots
        self.sdsRoot = sdsRoot if isinstance(sdsRoot, list) else [sdsRoot]

    def __getMSName(self, reqDate, net, sta, loc, cha):
        """Yield the candidate SDS file path for *reqDate* under each root."""
        for root in self.sdsRoot:
            yield (
                f"{root}/{reqDate.year}/{net}/{sta}/{cha}.D/{net}.{sta}.{loc}.{cha}.D."
                f"{reqDate.year}.{reqDate.strftime('%j')}"
            )

    @staticmethod
    def __time2recno(msFile, reclen, timeStart, recStart, timeEnd, recEnd, searchTime):
        """Locate the record number closest to *searchTime* by interpolated
        binary search; returns (recno, end_time of the inspected record).

        Assumes records of fixed size *reclen* sorted by begin time between
        record numbers *recStart* and *recEnd*.
        """
        if searchTime <= timeStart:
            msFile.seek(recStart * reclen)
            rec = seiscomp.mseedlite.Record(msFile)
            return (recStart, rec.end_time)

        if searchTime >= timeEnd:
            msFile.seek(recEnd * reclen)
            rec = seiscomp.mseedlite.Record(msFile)
            return (recEnd, rec.end_time)

        t1 = timeStart
        r1 = recStart
        t2 = timeEnd
        r2 = recEnd
        # initial guess by linear interpolation between the bracket ends
        rn = int(
            r1
            + (r2 - r1) * (searchTime - t1).total_seconds() / (t2 - t1).total_seconds()
        )
        rn = max(rn, recStart)
        rn = min(rn, recEnd)

        while True:
            msFile.seek(rn * reclen)
            rec = seiscomp.mseedlite.Record(msFile)

            if rec.begin_time < searchTime:
                r1 = rn
                t1 = rec.begin_time

                if t1 == t2:
                    break

                rn = int(
                    r1
                    + (r2 - r1)
                    * (searchTime - t1).total_seconds()
                    / (t2 - t1).total_seconds()
                )
                rn = max(rn, recStart)
                rn = min(rn, recEnd)

                if rn == r1:
                    break

            else:
                r2 = rn
                t2 = rec.begin_time

                if t1 == t2:
                    break

                rn = int(
                    r2
                    - (r2 - r1)
                    * (t2 - searchTime).total_seconds()
                    / (t2 - t1).total_seconds()
                )
                rn = max(rn, recStart)
                rn = min(rn, recEnd)

                if rn == r2:
                    break

        return rn, rec.end_time

    def __getWaveform(self, startt, endt, msFile, bufferSize):
        """Yield raw record chunks of *msFile* overlapping [startt, endt)."""
        if startt >= endt:
            return

        rec = seiscomp.mseedlite.Record(msFile)
        reclen = rec.size
        recStart = 0
        timeStart = rec.begin_time

        if rec.begin_time >= endt:
            return

        # inspect the last record to get the upper bracket
        msFile.seek(-reclen, 2)
        rec = seiscomp.mseedlite.Record(msFile)
        recEnd = msFile.tell() // reclen - 1
        timeEnd = rec.begin_time

        if rec.end_time <= startt:
            return

        if timeStart >= timeEnd:
            seiscomp.logging.error(
                f"{msFile.name}: overlap detected (start={timeStart}, end={timeEnd})"
            )
            return

        (lower, _) = self.__time2recno(
            msFile, reclen, timeStart, recStart, timeEnd, recEnd, startt
        )
        (upper, _) = self.__time2recno(
            msFile, reclen, startt, lower, timeEnd, recEnd, endt
        )

        if upper < lower:
            seiscomp.logging.error(
                f"{msFile.name}: overlap detected (lower={lower}, upper={upper})"
            )
            upper = lower

        msFile.seek(lower * reclen)
        remaining = (upper - lower + 1) * reclen
        check = True

        # round the buffer size up to a whole number of records
        if bufferSize % reclen:
            bufferSize += reclen - bufferSize % reclen

        while remaining > 0:
            size = min(remaining, bufferSize)
            data = msFile.read(size)
            remaining -= size
            offset = 0

            if not data:
                return

            if check:
                # skip leading records entirely before the requested window
                while offset < len(data):
                    rec = seiscomp.mseedlite.Record(data[offset : offset + reclen])

                    if rec.begin_time >= endt:
                        return

                    if rec.end_time > startt:
                        break

                    offset += reclen

                check = False

            if offset < len(data):
                yield data[offset:] if offset else data

        # records possibly appended while reading (real-time archive)
        while True:
            data = msFile.read(reclen)

            if not data:
                return

            rec = seiscomp.mseedlite.Record(data)

            if rec.begin_time >= endt:
                return

            yield data

    def __getDayRaw(self, day, startt, endt, net, sta, loc, cha, bufferSize):
        """Yield raw data chunks for one SDS day file of the given stream."""
        # Take into account the case of empty location
        if loc == "--":
            loc = ""

        for dataFile in self.__getMSName(day, net, sta, loc, cha):
            if not os.path.exists(dataFile):
                continue

            try:
                with open(dataFile, "rb") as msFile:
                    yield from self.__getWaveform(startt, endt, msFile, bufferSize)
            except seiscomp.mseedlite.MSeedError as e:
                seiscomp.logging.error(f"{dataFile}: {e}")

    def getRawBytes(self, startt, endt, net, sta, loc, cha, bufferSize):
        """Yield raw miniSEED bytes for the stream in [startt, endt).

        Starts one day before *startt* since a daily file may contain
        records spilling over from the previous day.
        """
        day = datetime.datetime(
            startt.year, startt.month, startt.day
        ) - datetime.timedelta(days=1)
        endDay = datetime.datetime(endt.year, endt.month, endt.day)

        while day <= endDay:
            yield from self.__getDayRaw(day, startt, endt, net, sta, loc, cha, bufferSize)
            day += datetime.timedelta(days=1)
|
296
lib/python/seiscomp/fdsnws/http.py
Normal file
296
lib/python/seiscomp/fdsnws/http.py
Normal file
@ -0,0 +1,296 @@
|
||||
################################################################################
|
||||
# Copyright (C) 2013-2014 by gempa GmbH
|
||||
#
|
||||
# HTTP -- Utility methods which generate HTTP result strings
|
||||
#
|
||||
# Author: Stephan Herrnkind
|
||||
# Email: herrnkind@gempa.de
|
||||
################################################################################
|
||||
|
||||
from twisted.web import http, resource, server, static, util
|
||||
|
||||
import seiscomp.core
|
||||
import seiscomp.logging
|
||||
|
||||
from .utils import accessLog, b_str, u_str, writeTSBin
|
||||
|
||||
VERSION = "1.2.5"
|
||||
|
||||
################################################################################
|
||||
|
||||
|
||||
class HTTP:
    """Static helpers generating FDSNWS-conformant HTTP result bodies."""

    # ---------------------------------------------------------------------------
    @staticmethod
    def renderErrorPage(request, code, msg, version=VERSION, ro=None):
        """Set the response code on *request* and return the error body.

        A 204 (NO_CONTENT) may be rewritten to the request's configured
        `nodata` code; status 204 carries no message body at all.
        """
        resp = b"""\
Error %i: %s

%s

Usage details are available from %s

Request:
%s

Request Submitted:
%s

Service Version:
%s
"""

        noContent = code == http.NO_CONTENT

        # rewrite response code if requested and no data was found
        if noContent and ro is not None:
            code = ro.noData

        # set response code
        request.setResponseCode(code)

        # status code 204 requires no message body
        if code == http.NO_CONTENT:
            response = b""
        else:
            request.setHeader("Content-Type", "text/plain; charset=utf-8")

            reference = b"%s/" % request.path.rpartition(b"/")[0]

            codeStr = http.RESPONSES[code]
            date = b_str(seiscomp.core.Time.GMT().toString("%FT%T.%f"))
            response = resp % (
                code,
                codeStr,
                b_str(msg),
                reference,
                request.uri,
                date,
                b_str(version),
            )
            if not noContent:
                seiscomp.logging.warning(
                    f"responding with error: {code} ({u_str(codeStr)})"
                )

        accessLog(request, ro, code, len(response), msg)
        return response

    # ---------------------------------------------------------------------------
    @staticmethod
    def renderNotFound(request, version=VERSION):
        """Render a 404 error page."""
        msg = "The requested resource does not exist on this server."
        return HTTP.renderErrorPage(request, http.NOT_FOUND, msg, version)

    # ---------------------------------------------------------------------------
    @staticmethod
    def renderNotModified(request, ro=None):
        """Answer a conditional request with 304 and an empty body."""
        code = http.NOT_MODIFIED
        request.setResponseCode(code)
        request.responseHeaders.removeHeader("Content-Type")
        accessLog(request, ro, code, 0, None)
|
||||
|
||||
|
||||
################################################################################
|
||||
class ServiceVersion(resource.Resource):
    """Leaf resource answering the FDSNWS `version` query with plain text."""

    isLeaf = True

    # ---------------------------------------------------------------------------
    def __init__(self, version):
        super().__init__()

        self.version = version
        self.type = "text/plain"

    # ---------------------------------------------------------------------------
    def render(self, request):
        """Return the configured version string as UTF-8 plain text."""
        request.setHeader("Content-Type", "text/plain; charset=utf-8")
        return b_str(self.version)
|
||||
|
||||
|
||||
################################################################################
|
||||
class WADLFilter(static.Data):
    """Static WADL resource with selected <param> elements filtered out.

    Reads the WADL file at *path* and drops every <param> whose name is
    listed in *paramNameFilterList*, including multi-line definitions.
    """

    # ---------------------------------------------------------------------------
    def __init__(self, path, paramNameFilterList):
        filtered = ""
        removeParam = False  # inside a multi-line <param> being dropped
        with open(path, "r", encoding="utf-8") as fp:
            for line in fp:
                lineStripped = line.strip().replace(" ", "")

                if removeParam:
                    # swallow lines until the closing tag
                    if "</param>" in lineStripped:
                        removeParam = False
                    continue

                valid = True
                if "<param" in lineStripped:
                    for f in paramNameFilterList:
                        if f'name="{f}"' in lineStripped:
                            valid = False
                            # non-self-closing tag: keep dropping lines
                            if lineStripped[-2:] != "/>":
                                removeParam = True
                            break

                if valid:
                    filtered += line

        super().__init__(b_str(filtered), "application/xml; charset=utf-8")
|
||||
|
||||
|
||||
################################################################################
|
||||
class BaseResource(resource.Resource):
    """Common base of all FDSNWS resources; bundles error-page helpers."""

    # ---------------------------------------------------------------------------
    def __init__(self, version=VERSION):
        super().__init__()

        self.version = version

    # ---------------------------------------------------------------------------
    def renderErrorPage(self, request, code, msg, ro=None):
        """Return an error body for *code*/*msg* using this service version."""
        return HTTP.renderErrorPage(request, code, msg, self.version, ro)

    # ---------------------------------------------------------------------------
    def writeErrorPage(self, request, code, msg, ro=None):
        """Render an error page and write it to the request immediately."""
        data = self.renderErrorPage(request, code, msg, ro)
        if data:
            writeTSBin(request, data)

    # ---------------------------------------------------------------------------
    def returnNotModified(self, request, ro=None):
        """Answer with 304 Not Modified."""
        HTTP.renderNotModified(request, ro)

    # ---------------------------------------------------------------------------
    def checkObjects(self, request, objCount, maxObj):
        """Return True if *objCount* is within the configured limit,
        otherwise write a 413 error page and return False."""
        if objCount <= maxObj:
            return True

        msg = (
            "The result set of your request exceeds the configured maximum "
            f"number of objects ({maxObj}). Refine your request parameters."
        )
        self.writeErrorPage(request, http.REQUEST_ENTITY_TOO_LARGE, msg)
        return False
|
||||
|
||||
|
||||
################################################################################
|
||||
class NoResource(BaseResource):
    """Catch-all leaf resource rendering a 404 page for unknown paths."""

    isLeaf = True

    # ---------------------------------------------------------------------------
    def render(self, request):
        """Render the standard 'not found' error page."""
        return HTTP.renderNotFound(request, self.version)

    # ---------------------------------------------------------------------------
    def getChild(self, _path, _request):
        """Any sub-path resolves to this resource as well."""
        return self
|
||||
|
||||
|
||||
################################################################################
|
||||
class ListingResource(BaseResource):
    """Directory-style HTML listing of the non-leaf child resources."""

    html = """<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="author" content="gempa GmbH">
<title>SeisComP FDSNWS Implementation</title>
</head>
<body>
<p><a href="../">Parent Directory</a></p>
<h1>SeisComP FDSNWS Web Service</h1>
<p>Index of %s</p>
<ul>
%s
</ul>
</body>"""

    # ---------------------------------------------------------------------------
    def render(self, request):
        """Render an index page of listable children; canonicalize the URL
        to end with a slash first."""
        if request.path[-1:] != b"/":
            return util.redirectTo(request.path + b"/", request)

        lis = ""
        for k, v in self.children.items():
            # leaves and explicitly hidden resources are not listed
            if v.isLeaf:
                continue
            if hasattr(v, "hideInListing") and v.hideInListing:
                continue
            name = u_str(k)
            lis += f'<li><a href="{name}/">{name}/</a></li>\n'

        return b_str(ListingResource.html % (u_str(request.path), lis))

    # ---------------------------------------------------------------------------
    def getChild(self, path, _request):
        """Empty path maps to the listing itself, anything else is a 404."""
        if not path:
            return self

        return NoResource(self.version)
|
||||
|
||||
|
||||
################################################################################
|
||||
class DirectoryResource(static.File):
    """Serves a single static file while behaving like a directory URL."""

    # ---------------------------------------------------------------------------
    def __init__(self, fileName, version=VERSION):
        super().__init__(fileName)

        self.version = version
        self.childNotFound = NoResource(self.version)

    # ---------------------------------------------------------------------------
    def render(self, request):
        """Canonicalize the URL to a trailing slash, then serve the file."""
        if request.path[-1:] != b"/":
            return util.redirectTo(request.path + b"/", request)

        return static.File.render(self, request)

    # ---------------------------------------------------------------------------
    def getChild(self, path, _request):
        """Empty path maps to the file itself, anything else is a 404."""
        if not path:
            return self

        return NoResource(self.version)
|
||||
|
||||
|
||||
################################################################################
|
||||
class Site(server.Site):
    """Twisted site adding standard FDSNWS and CORS headers to every
    response."""

    def __init__(self, res, corsOrigins):
        super().__init__(res)

        self._corsOrigins = corsOrigins

    # ---------------------------------------------------------------------------
    def getResourceFor(self, request):
        """Log the request, set common headers and dispatch as usual."""
        seiscomp.logging.debug(
            f"request ({request.getClientIP()}): {u_str(request.uri)}"
        )
        request.setHeader("Server", f"SeisComP-FDSNWS/{VERSION}")
        request.setHeader("Access-Control-Allow-Headers", "Authorization")
        request.setHeader("Access-Control-Expose-Headers", "WWW-Authenticate")

        self.setAllowOrigin(request)

        return server.Site.getResourceFor(self, request)

    # ---------------------------------------------------------------------------
    def setAllowOrigin(self, req):
        """Add the Access-Control-Allow-Origin header per configuration."""
        # no allowed origin: no response header
        lenOrigins = len(self._corsOrigins)
        if lenOrigins == 0:
            return

        # one origin: add header
        if lenOrigins == 1:
            req.setHeader("Access-Control-Allow-Origin", self._corsOrigins[0])
            return

        # more than one origin: check current origin against allowed origins
        # and return the current origin on match.
        origin = req.getHeader("Origin")
        if origin in self._corsOrigins:
            req.setHeader("Access-Control-Allow-Origin", origin)

        # Set Vary header to let the browser know that the response depends
        # on the request. Certain cache strategies should be disabled.
        req.setHeader("Vary", "Origin")
|
101
lib/python/seiscomp/fdsnws/log.py
Normal file
101
lib/python/seiscomp/fdsnws/log.py
Normal file
@ -0,0 +1,101 @@
|
||||
################################################################################
|
||||
# Copyright (C) 2013-2014 gempa GmbH
|
||||
#
|
||||
# Thread-safe file logger
|
||||
#
|
||||
# Author: Stephan Herrnkind
|
||||
# Email: herrnkind@gempa.de
|
||||
################################################################################
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import threading
|
||||
|
||||
from queue import Queue
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _worker(log):
|
||||
while True:
|
||||
# pylint: disable=W0212
|
||||
msg = log._queue.get()
|
||||
log._write(str(msg))
|
||||
log._queue.task_done()
|
||||
|
||||
|
||||
################################################################################
|
||||
class Log:
    """Thread-safe daily-rotating file logger.

    Messages are queued by log() and written by a daemon worker thread,
    so callers never block on file I/O.
    """

    # ---------------------------------------------------------------------------
    def __init__(self, filePath, archiveSize=7):
        self._filePath = filePath
        self._basePath = os.path.dirname(filePath)
        self._fileName = os.path.basename(filePath)
        self._archiveSize = archiveSize
        self._queue = Queue()
        self._lastLogTime = None  # time.struct_time of the last written message
        self._fd = None

        # negative archive sizes are treated as "no rotation"
        self._archiveSize = max(self._archiveSize, 0)

        # worker thread, responsible for writing messages to file
        t = threading.Thread(target=_worker, args=(self,))
        t.daemon = True
        t.start()

    # ---------------------------------------------------------------------------
    def __del__(self):
        # wait for worker thread to write all pending log messages
        self._queue.join()

        if self._fd is not None:
            self._fd.close()

    # ---------------------------------------------------------------------------
    def log(self, msg):
        """Queue *msg* for asynchronous writing."""
        self._queue.put(msg)

    # ---------------------------------------------------------------------------
    def _rotate(self):
        """Shift the archived files by one and reopen a fresh log file."""
        self._fd.close()
        self._fd = None

        try:
            pattern = f"{self._filePath}.%i"
            for i in range(self._archiveSize, 1, -1):
                src = pattern % (i - 1)
                if os.path.isfile(src):
                    os.rename(src, pattern % i)
            os.rename(self._filePath, pattern % 1)
        except Exception as e:
            # best effort: a failed rotation must not kill the worker
            print(f"failed to rotate access log: {e}", file=sys.stderr)

        self._fd = open(self._filePath, "w", encoding="utf-8")

    # ---------------------------------------------------------------------------
    def _write(self, msg):
        """Write one message, opening and rotating the file as needed.

        Called from the worker thread only.
        """
        try:
            now = time.localtime()
            if self._fd is None:
                if self._basePath and not os.path.exists(self._basePath):
                    os.makedirs(self._basePath)
                self._fd = open(self._filePath, "a", encoding="utf-8")
            elif (
                self._archiveSize > 0
                and self._lastLogTime is not None
                and (
                    self._lastLogTime.tm_yday != now.tm_yday
                    or self._lastLogTime.tm_year != now.tm_year
                )
            ):
                # day changed since the last message: rotate
                self._rotate()

            print(msg, file=self._fd)
            self._fd.flush()
            self._lastLogTime = now
        except Exception as e:
            print(f"access log: {e}", file=sys.stderr)
|
||||
|
||||
|
||||
# vim: ts=4 et
|
138
lib/python/seiscomp/fdsnws/reqlog.py
Normal file
138
lib/python/seiscomp/fdsnws/reqlog.py
Normal file
@ -0,0 +1,138 @@
|
||||
import os
|
||||
import datetime
|
||||
import json
|
||||
import hashlib
|
||||
import subprocess
|
||||
import logging
|
||||
import logging.handlers
|
||||
import threading
|
||||
|
||||
|
||||
from .utils import b_str
|
||||
|
||||
mutex = threading.Lock()
|
||||
|
||||
|
||||
class MyFileHandler(logging.handlers.TimedRotatingFileHandler):
    """Log-file handler rotating at UTC midnight, bzip2-compressing each
    rotated file in the background."""

    def __init__(self, filename):
        super().__init__(filename, when="midnight", utc=True)

    def rotate(self, source, dest):
        super().rotate(source, dest)

        if not os.path.exists(dest):
            return
        # compress asynchronously; the outcome is deliberately not awaited
        subprocess.Popen(["bzip2", dest])
|
||||
|
||||
|
||||
class Tracker:
    """Collects tracking data of one FDSNWS request and emits it as a single
    JSON log line via request_status()."""

    def __init__(self, logger, geoip, service, userName, userIP, clientID, userSalt):
        self.__logger = logger
        self.__userName = userName
        self.__userSalt = userSalt
        # guards against emitting the same request twice
        self.__logged = False

        # anonymize the user: salted MD5 of the lowercased name, or of the IP
        # for anonymous requests, truncated to a 32-bit int
        if userName:
            userID = int(
                hashlib.md5(b_str(userSalt + userName.lower())).hexdigest()[:8], 16
            )
        else:
            userID = int(hashlib.md5(b_str(userSalt + userIP)).hexdigest()[:8], 16)

        self.__data = {
            "service": service,
            "userID": userID,
            "clientID": clientID,
            "userEmail": None,
            "auth": bool(userName),
            "userLocation": {},
            "created": f"{datetime.datetime.utcnow().isoformat()}Z",
        }

        if geoip:
            self.__data["userLocation"]["country"] = geoip.country_code_by_addr(userIP)

        # tag GFZ-internal users and addresses
        if (
            userName and userName.lower().endswith("@gfz-potsdam.de")
        ) or userIP.startswith("139.17."):
            self.__data["userLocation"]["institution"] = "GFZ"

    # pylint: disable=W0613
    def line_status(
        self,
        start_time,
        end_time,
        network,
        station,
        channel,
        location,
        restricted,
        net_class,
        shared,
        constraints,
        volume,
        status,
        size,
        message,
    ):
        """Record the per-stream-line status under the "trace" key."""
        try:
            trace = self.__data["trace"]

        except KeyError:
            trace = []
            self.__data["trace"] = trace

        trace.append(
            {
                "net": network,
                "sta": station,
                "loc": location,
                "cha": channel,
                "start": start_time.iso(),
                "end": end_time.iso(),
                "restricted": restricted,
                "status": status,
                "bytes": size,
            }
        )

        # only expose the user's e-mail when restricted data was delivered
        if restricted and status == "OK":
            self.__data["userEmail"] = self.__userName

    # FDSNWS requests have one volume, so volume_status() is called once per request
    def volume_status(self, volume, status, size, message):
        self.__data["status"] = status
        self.__data["bytes"] = size
        self.__data["finished"] = f"{datetime.datetime.utcnow().isoformat()}Z"

    def request_status(self, status, message):
        """Write the collected data as one JSON line (at most once)."""
        with mutex:
            if not self.__logged:
                self.__logger.info(json.dumps(self.__data))
                self.__logged = True
|
||||
|
||||
|
||||
class RequestLog:
    """JSON request log writing to a daily rotated, bzip2-compressed file."""

    def __init__(self, filename, userSalt):
        self.__logger = logging.getLogger("seiscomp.fdsnws.reqlog")
        self.__logger.addHandler(MyFileHandler(filename))
        self.__logger.setLevel(logging.INFO)
        # salt mixed into the anonymized user hash, see Tracker
        self.__userSalt = userSalt

        # GeoIP is optional; without it no country lookup is performed
        try:
            import GeoIP

            self.__geoip = GeoIP.new(GeoIP.GEOIP_MEMORY_CACHE)

        except ImportError:
            self.__geoip = None

    def tracker(self, service, userName, userIP, clientID):
        """Create a Tracker bound to this log for a single request."""
        return Tracker(
            self.__logger,
            self.__geoip,
            service,
            userName,
            userIP,
            clientID,
            self.__userSalt,
        )
|
179
lib/python/seiscomp/fdsnws/reqtrack.py
Normal file
179
lib/python/seiscomp/fdsnws/reqtrack.py
Normal file
@ -0,0 +1,179 @@
|
||||
from twisted.internet import reactor
|
||||
import seiscomp.core
|
||||
import seiscomp.datamodel
|
||||
|
||||
|
||||
def callFromThread(f):
    """Decorator: schedule *f* on the twisted reactor thread instead of
    calling it directly (any return value of *f* is discarded)."""

    def scheduled(*args, **kwargs):
        reactor.callFromThread(f, *args, **kwargs)

    return scheduled
|
||||
|
||||
|
||||
def enableNotifier(f):
    """Decorator: run *f* with seiscomp datamodel notifications enabled and
    restore the previous notifier state afterwards.

    NOTE: state is intentionally not restored if *f* raises (no try/finally),
    matching the original behavior.
    """

    def with_notifier(*args, **kwargs):
        previous = seiscomp.datamodel.Notifier.IsEnabled()
        seiscomp.datamodel.Notifier.SetEnabled(True)
        f(*args, **kwargs)
        seiscomp.datamodel.Notifier.SetEnabled(previous)

    return with_notifier
|
||||
|
||||
|
||||
class RequestTrackerDB(object):
    """Records an Arclink request with per-line and per-volume status in the
    SeisComP database, by sending ArclinkLog notifier messages over the
    "LOGGING" messaging group."""

    def __init__(
        self,
        appName,
        msgConn,
        req_id,
        req_type,
        user,
        header,
        label,
        user_ip,
        client_ip,
    ):
        self.msgConn = msgConn
        self.arclinkRequest = seiscomp.datamodel.ArclinkRequest.Create()
        self.arclinkRequest.setCreated(seiscomp.core.Time.GMT())
        self.arclinkRequest.setRequestID(req_id)
        self.arclinkRequest.setUserID(str(user))
        self.arclinkRequest.setClientID(appName)
        if user_ip:
            self.arclinkRequest.setUserIP(user_ip)
        if client_ip:
            self.arclinkRequest.setClientIP(client_ip)
        self.arclinkRequest.setType(req_type)
        self.arclinkRequest.setLabel(label)
        self.arclinkRequest.setHeader(header)

        # aggregated over all request lines for the summary
        self.averageTimeWindow = seiscomp.core.TimeSpan(0.0)
        self.totalLineCount = 0
        self.okLineCount = 0

        # collected lines, attached to the request in request_status()
        self.requestLines = []
        self.statusLines = []

    def send(self):
        """Flush pending notifier messages to the LOGGING group."""
        msg = seiscomp.datamodel.Notifier.GetMessage(True)
        if msg:
            self.msgConn.send("LOGGING", msg)

    def line_status(
        self,
        start_time,
        end_time,
        network,
        station,
        channel,
        location,
        restricted,
        net_class,
        shared,
        constraints,
        volume,
        status,
        size,
        message,
    ):
        """Record one request line and its status; missing values are
        normalized before being stored."""
        # empty stream components are stored as "."
        if network is None or network == "":
            network = "."
        if station is None or station == "":
            station = "."
        if channel is None or channel == "":
            channel = "."
        if location is None or location == "":
            location = "."
        if volume is None:
            volume = "NODATA"
        if size is None:
            size = 0
        if message is None:
            message = ""

        # constraints may arrive as a list of strings or a mapping
        if isinstance(constraints, list):
            constr = " ".join(constraints)
        else:
            constr = " ".join([f"{a}={b}" for (a, b) in constraints.items()])

        arclinkRequestLine = seiscomp.datamodel.ArclinkRequestLine()
        arclinkRequestLine.setStart(start_time)
        arclinkRequestLine.setEnd(end_time)
        # stream codes are truncated to 8 characters each
        arclinkRequestLine.setStreamID(
            seiscomp.datamodel.WaveformStreamID(
                network[:8], station[:8], location[:8], channel[:8], ""
            )
        )
        arclinkRequestLine.setConstraints(constr)
        if isinstance(restricted, bool):
            arclinkRequestLine.setRestricted(restricted)
        arclinkRequestLine.setNetClass(net_class)
        if isinstance(shared, bool):
            arclinkRequestLine.setShared(shared)
        #
        arclinkStatusLine = seiscomp.datamodel.ArclinkStatusLine()
        arclinkStatusLine.setVolumeID(volume)
        arclinkStatusLine.setStatus(status)
        arclinkStatusLine.setSize(size)
        arclinkStatusLine.setMessage(message)
        #
        arclinkRequestLine.setStatus(arclinkStatusLine)
        self.requestLines.append(arclinkRequestLine)

        # update summary counters
        self.averageTimeWindow += end_time - start_time
        self.totalLineCount += 1
        if status == "OK":
            self.okLineCount += 1

    def volume_status(self, volume, status, size, message):
        """Record the status of one delivered volume."""
        if volume is None:
            volume = "NODATA"
        if size is None:
            size = 0
        if message is None:
            message = ""

        arclinkStatusLine = seiscomp.datamodel.ArclinkStatusLine()
        arclinkStatusLine.setVolumeID(volume)
        arclinkStatusLine.setStatus(status)
        arclinkStatusLine.setSize(size)
        arclinkStatusLine.setMessage(message)
        self.statusLines.append(arclinkStatusLine)

    @callFromThread
    @enableNotifier
    def request_status(self, status, message):
        """Finalize the request: attach summary and collected lines to the
        ArclinkRequest and send the notifier message.

        Runs on the reactor thread with notifications enabled (decorators).
        """
        if message is None:
            message = ""

        self.arclinkRequest.setStatus(status)
        self.arclinkRequest.setMessage(message)

        ars = seiscomp.datamodel.ArclinkRequestSummary()
        tw = self.averageTimeWindow.seconds()
        if self.totalLineCount > 0:
            # average request time window
            tw = self.averageTimeWindow.seconds() // self.totalLineCount
        if tw >= 2**31:
            tw = -1  # prevent 32bit int overflow
        ars.setAverageTimeWindow(tw)
        ars.setTotalLineCount(self.totalLineCount)
        ars.setOkLineCount(self.okLineCount)
        self.arclinkRequest.setSummary(ars)

        al = seiscomp.datamodel.ArclinkLog()
        al.add(self.arclinkRequest)

        for obj in self.requestLines:
            self.arclinkRequest.add(obj)

        for obj in self.statusLines:
            self.arclinkRequest.add(obj)

        self.send()

    def __verseed_errors(self, volume):
        # placeholder, kept for interface compatibility
        pass

    def verseed(self, volume, file):
        # placeholder, kept for interface compatibility
        pass
|
609
lib/python/seiscomp/fdsnws/request.py
Normal file
609
lib/python/seiscomp/fdsnws/request.py
Normal file
@ -0,0 +1,609 @@
|
||||
################################################################################
|
||||
# Copyright (C) 2013-2014 gempa GmbH
|
||||
#
|
||||
# RequestOptions -- HTTP GET request parameters
|
||||
#
|
||||
# Author: Stephan Herrnkind
|
||||
# Email: herrnkind@gempa.de
|
||||
################################################################################
|
||||
|
||||
import fnmatch
|
||||
import math
|
||||
import re
|
||||
|
||||
from twisted.web import http
|
||||
|
||||
from seiscomp.core import Time
|
||||
import seiscomp.logging
|
||||
import seiscomp.math
|
||||
|
||||
from .utils import u_str
|
||||
|
||||
|
||||
class RequestOptions:
    """Parsed options of a single FDSN web service request (GET or POST)."""

    # the match() method matched only patterns at the beginning of a string,
    # since we have to ensure that no invalid character is present we use the
    # search() method in combination with a negated pattern instead
    FloatChars = re.compile(r"[^-0-9.]").search
    ChannelChars = re.compile(r"[^A-Za-z0-9*?]").search
    ChannelExtChars = re.compile(r"[^A-Za-z0-9*?+\-_]").search
    BooleanTrueValues = ["1", "true", "t", "yes", "y"]
    BooleanFalseValues = ["0", "false", "f", "no", "n"]
    OutputFormats = []  # override in derived classes

    # accepted aliases for the time parameters
    PStart = ["starttime", "start"]
    PEnd = ["endtime", "end"]
    PStartBefore = ["startbefore"]
    PStartAfter = ["startafter"]
    PEndBefore = ["endbefore"]
    PEndAfter = ["endafter"]
    SimpleTimeParams = PStart + PEnd
    WindowTimeParams = PStartBefore + PStartAfter + PEndBefore + PEndAfter
    TimeParams = SimpleTimeParams + WindowTimeParams

    # accepted aliases for the stream parameters
    PNet = ["network", "net"]
    PSta = ["station", "sta"]
    PLoc = ["location", "loc"]
    PCha = ["channel", "cha"]
    StreamParams = PNet + PSta + PLoc + PCha

    # accepted aliases for the geographic parameters
    PMinLat = ["minlatitude", "minlat"]
    PMaxLat = ["maxlatitude", "maxlat"]
    PMinLon = ["minlongitude", "minlon"]
    PMaxLon = ["maxlongitude", "maxlon"]
    PLat = ["latitude", "lat"]
    PLon = ["longitude", "lon"]
    PMinRadius = ["minradius"]
    PMaxRadius = ["maxradius"]
    GeoRectParams = PMinLat + PMaxLat + PMinLon + PMaxLon
    GeoCircleParams = PLat + PLon + PMinRadius + PMaxRadius
    GeoParams = GeoRectParams + GeoCircleParams

    # output options
    PFormat = ["format"]
    PNoData = ["nodata"]
    OutputParams = PFormat + PNoData

    # parameter sets accepted in POST header lines and GET queries;
    # derived classes extend these
    POSTParams = OutputParams
    GETParams = StreamParams + SimpleTimeParams
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
    class Channel:
        """Stream code filters: lists of glob patterns per code component."""

        def __init__(self):
            self.net = None
            self.sta = None
            self.loc = None
            self.cha = None

        def matchNet(self, value):
            return self.match(value, self.net)

        def matchSta(self, value):
            return self.match(value, self.sta)

        def matchLoc(self, value):
            # "--" additionally matches the empty location code
            return self.match(value, self.loc, True)

        def matchCha(self, value):
            return self.match(value, self.cha)

        @staticmethod
        def match(value, globList, testEmpty=False):
            """Return True if *value* matches any pattern in *globList*.

            An unset (None or empty) pattern list matches everything. With
            *testEmpty*, the pattern "--" matches the empty string.
            """
            if not globList:
                return True

            for glob in globList:
                if testEmpty and value == "" and glob == "--":
                    return True
                if fnmatch.fnmatchcase(value, glob):
                    return True

            return False
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
    class Time:
        """Time constraints of a request: simple (start/end) and window
        (startbefore/startafter/endbefore/endafter) time."""

        def __init__(self):
            # True while only start/end are in effect
            self.simpleTime = True
            self.start = None
            self.end = None
            # window time only
            self.startBefore = None
            self.startAfter = None
            self.endBefore = None
            self.endAfter = None

        # used by FDSN Station and DataSelect
        def match(self, start, end=None):
            """Return True if the epoch [start, end] satisfies the constraints."""
            # simple time: limit to epochs intersecting with the specified time
            # range
            res = (self.start is None or end is None or end >= self.start) and (
                self.end is None or start <= self.end
            )

            # window time: limit to epochs strictly starting or ending before or
            # after a specified time value
            if not self.simpleTime:
                res = (
                    res
                    and (
                        self.startBefore is None
                        or (start is not None and start < self.startBefore)
                    )
                    and (
                        self.startAfter is None
                        or (start is not None and start > self.startAfter)
                    )
                    and (
                        self.endBefore is None
                        or (end is not None and end < self.endBefore)
                    )
                    and (self.endAfter is None or end is None or end > self.endAfter)
                )

            return res
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
    class Geo:
        """Geographic constraints: either a bounding box or a bounding circle."""

        # -----------------------------------------------------------------------
        class BBox:
            """Rectangular region; any bound may be None (unbounded)."""

            def __init__(self):
                self.minLat = None
                self.maxLat = None
                self.minLon = None
                self.maxLon = None

            def dateLineCrossing(self):
                # minLon > maxLon encodes a box spanning the +/-180 meridian
                return self.minLon and self.maxLon and self.minLon > self.maxLon

        # -----------------------------------------------------------------------
        class BCircle:
            """Circular region around (lat, lon), radius limits in degrees."""

            def __init__(self):
                self.lat = None
                self.lon = None
                self.minRad = None
                self.maxRad = None

            # -------------------------------------------------------------------
            # Calculates outer bounding box
            def calculateBBox(self):
                """Return the smallest BBox enclosing the circle's outer radius.

                Bounds remain None where the circle imposes no restriction,
                e.g. when a pole lies within the radius or the radius covers
                the whole globe.
                """

                def rad(degree):
                    return math.radians(degree)

                def deg(radians):
                    return math.degrees(radians)

                b = RequestOptions.Geo.BBox()
                # radius >= 180 degrees covers the whole globe: no bounds
                if self.maxRad is None or self.maxRad >= 180:
                    return b

                b.minLat = self.lat - self.maxRad
                b.maxLat = self.lat + self.maxRad
                if b.minLat > -90 and b.maxLat < 90:
                    # NOTE(review): grouping asin(sin(r / cos(lat))) looks
                    # unusual (often written asin(sin(r) / cos(lat))) —
                    # verify the longitude-extent formula
                    dLon = deg(
                        math.asin(math.sin(rad(self.maxRad) / math.cos(rad(self.lat))))
                    )
                    b.minLon = self.lon - dLon
                    if b.minLon < -180:
                        b.minLon += 360
                    b.maxLon = self.lon + dLon
                    if b.maxLon > 180:
                        b.maxLon -= 360
                else:
                    # pole within distance: one latitude and no longitude
                    # restrictions remains
                    if b.minLat <= -90:
                        b.minLat = None
                    else:
                        b.maxLat = None
                    b.minLon = None
                    b.maxLon = None

                return b

        # -----------------------------------------------------------------------
        def __init__(self):
            # exactly one of the two is set after parsing
            self.bBox = None
            self.bCircle = None

        # -----------------------------------------------------------------------
        def match(self, lat, lon):
            """Return True if (lat, lon) lies within the configured region."""
            if self.bBox is not None:
                b = self.bBox
                if b.minLat is not None and lat < b.minLat:
                    return False
                if b.maxLat is not None and lat > b.maxLat:
                    return False
                # date line crossing if minLon > maxLon
                if b.dateLineCrossing():
                    return lon >= b.minLon or lon <= b.maxLon
                if b.minLon is not None and lon < b.minLon:
                    return False
                if b.maxLon is not None and lon > b.maxLon:
                    return False
                return True

            if self.bCircle:
                c = self.bCircle
                # delazi: epicentral distance in degrees is the first element
                dist = seiscomp.math.delazi(c.lat, c.lon, lat, lon)
                if c.minRad is not None and dist[0] < c.minRad:
                    return False
                if c.maxRad is not None and dist[0] > c.maxRad:
                    return False
                return True

            return False
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
    def __init__(self):
        self.service = ""
        # seiscomp.core.Time (module-level import), not RequestOptions.Time
        self.accessTime = Time.GMT()
        self.userName = None

        # parsed constraint objects; None until the respective parse step ran
        self.time = None
        self.channel = None
        self.geo = None

        # HTTP status code returned for empty result sets
        self.noData = http.NO_CONTENT
        self.format = None

        # raw request arguments: name -> list of values
        self._args = {}
        self.streams = []  # 1 entry for GET, multiple entries for POST
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def parseOutput(self):
|
||||
# nodata
|
||||
code = self.parseInt(self.PNoData)
|
||||
if code is not None:
|
||||
if code not in (http.NO_CONTENT, http.NOT_FOUND):
|
||||
self.raiseValueError(self.PNoData[0])
|
||||
self.noData = code
|
||||
|
||||
# format
|
||||
key, value = self.getFirstValue(self.PFormat)
|
||||
if value is None:
|
||||
# no format specified: default to first in list if available
|
||||
if len(self.OutputFormats) > 0:
|
||||
self.format = self.OutputFormats[0]
|
||||
else:
|
||||
value = value.lower()
|
||||
if value in self.OutputFormats:
|
||||
self.format = value
|
||||
else:
|
||||
self.raiseValueError(key)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def parseChannel(self):
|
||||
c = RequestOptions.Channel()
|
||||
|
||||
c.net = self.parseChannelChars(self.PNet, False, True)
|
||||
c.sta = self.parseChannelChars(self.PSta)
|
||||
c.loc = self.parseChannelChars(self.PLoc, True)
|
||||
c.cha = self.parseChannelChars(self.PCha)
|
||||
|
||||
if c.net or c.sta or c.loc or c.cha:
|
||||
self.channel = c
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def parseTime(self, parseWindowTime=False):
|
||||
t = RequestOptions.Time()
|
||||
|
||||
# start[time], end[time]
|
||||
t.start = self.parseTimeStr(self.PStart)
|
||||
t.end = self.parseTimeStr(self.PEnd)
|
||||
|
||||
simpleTime = t.start is not None or t.end is not None
|
||||
|
||||
# [start,end][before,after]
|
||||
if parseWindowTime:
|
||||
t.startBefore = self.parseTimeStr(self.PStartBefore)
|
||||
t.startAfter = self.parseTimeStr(self.PStartAfter)
|
||||
t.endBefore = self.parseTimeStr(self.PEndBefore)
|
||||
t.endAfter = self.parseTimeStr(self.PEndAfter)
|
||||
|
||||
windowTime = (
|
||||
t.startBefore is not None
|
||||
or t.startAfter is not None
|
||||
or t.endBefore is not None
|
||||
or t.endAfter is not None
|
||||
)
|
||||
if simpleTime or windowTime:
|
||||
self.time = t
|
||||
self.time.simpleTime = not windowTime
|
||||
|
||||
elif simpleTime:
|
||||
self.time = t
|
||||
self.time.simpleTime = True
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
    def parseGeo(self):
        """Parse geographic constraints: bounding box OR bounding circle."""
        # bounding box (optional)
        b = RequestOptions.Geo.BBox()
        b.minLat = self.parseFloat(self.PMinLat, -90, 90)
        b.maxLat = self.parseFloat(self.PMaxLat, -90, 90)
        if b.minLat is not None and b.maxLat is not None and b.minLat > b.maxLat:
            raise ValueError(f"{self.PMinLat[0]} exceeds {self.PMaxLat[0]}")

        b.minLon = self.parseFloat(self.PMinLon, -180, 180)
        b.maxLon = self.parseFloat(self.PMaxLon, -180, 180)
        # maxLon < minLon -> date line crossing

        hasBBoxParam = (
            b.minLat is not None
            or b.maxLat is not None
            or b.minLon is not None
            or b.maxLon is not None
        )

        # bounding circle (optional)
        c = RequestOptions.Geo.BCircle()
        c.lat = self.parseFloat(self.PLat, -90, 90)
        c.lon = self.parseFloat(self.PLon, -180, 180)
        c.minRad = self.parseFloat(self.PMinRadius, 0, 180)
        c.maxRad = self.parseFloat(self.PMaxRadius, 0, 180)
        if c.minRad is not None and c.maxRad is not None and c.minRad > c.maxRad:
            raise ValueError(f"{self.PMinRadius[0]} exceeds {self.PMaxRadius[0]}")

        hasBCircleRadParam = c.minRad is not None or c.maxRad is not None
        hasBCircleParam = c.lat is not None or c.lon is not None or hasBCircleRadParam

        # bounding box and bounding circle may not be combined
        if hasBBoxParam and hasBCircleParam:
            raise ValueError(
                "bounding box and bounding circle parameters may not be combined"
            )
        if hasBBoxParam:
            self.geo = RequestOptions.Geo()
            self.geo.bBox = b
        elif hasBCircleRadParam:
            # NOTE(review): lat/lon without a radius does not activate the
            # circle filter — presumably intentional; confirm
            self.geo = RequestOptions.Geo()
            # missing center components default to 0.0
            if c.lat is None:
                c.lat = 0.0
            if c.lon is None:
                c.lon = 0.0
            self.geo.bCircle = c
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
@staticmethod
|
||||
def _assertValueRange(key, v, minValue, maxValue):
|
||||
if (minValue is not None and v < minValue) or (
|
||||
maxValue is not None and v > maxValue
|
||||
):
|
||||
minStr, maxStr = "-inf", "inf"
|
||||
if minValue is not None:
|
||||
minStr = str(minValue)
|
||||
if maxValue is not None:
|
||||
maxStr = str(maxValue)
|
||||
raise ValueError(f"parameter not in domain [{minStr},{maxStr}]: {key}")
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
    @staticmethod
    def raiseValueError(key):
        """Raise a uniform ValueError for an invalid request parameter."""
        raise ValueError(f"invalid value in parameter: {key}")
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def getFirstValue(self, keys):
|
||||
for key in keys:
|
||||
if key in self._args:
|
||||
return key, self._args[key][0].strip()
|
||||
|
||||
return None, None
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def getValues(self, keys):
|
||||
v = []
|
||||
for key in keys:
|
||||
if key in self._args:
|
||||
v += self._args[key]
|
||||
return v
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def getListValues(self, keys, lower=False):
|
||||
values = set()
|
||||
for key in keys:
|
||||
if key not in self._args:
|
||||
continue
|
||||
|
||||
for vList in self._args[key]:
|
||||
for v in vList.split(","):
|
||||
if v is None:
|
||||
continue
|
||||
v = v.strip()
|
||||
if lower:
|
||||
v = v.lower()
|
||||
values.add(v)
|
||||
|
||||
return values
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def parseInt(self, keys, minValue=None, maxValue=None):
|
||||
key, value = self.getFirstValue(keys)
|
||||
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
i = int(value)
|
||||
except ValueError as e:
|
||||
raise ValueError(f"invalid integer value in parameter: {key}") from e
|
||||
|
||||
self._assertValueRange(key, i, minValue, maxValue)
|
||||
return i
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def parseFloat(self, keys, minValue=None, maxValue=None):
|
||||
key, value = self.getFirstValue(keys)
|
||||
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
if self.FloatChars(value):
|
||||
raise ValueError(
|
||||
f"invalid characters in float parameter: {key} (scientific notation "
|
||||
"forbidden by spec)"
|
||||
)
|
||||
|
||||
try:
|
||||
f = float(value)
|
||||
except ValueError as e:
|
||||
raise ValueError(f"invalid float value in parameter: {key}") from e
|
||||
|
||||
self._assertValueRange(key, f, minValue, maxValue)
|
||||
return f
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def parseBool(self, keys):
|
||||
key, value = self.getFirstValue(keys)
|
||||
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
value = value.lower()
|
||||
if value in self.BooleanTrueValues:
|
||||
return True
|
||||
if value in self.BooleanFalseValues:
|
||||
return False
|
||||
|
||||
raise ValueError(f"invalid boolean value in parameter: {key}")
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def parseTimeStr(self, keys):
|
||||
key, value = self.getFirstValue(keys)
|
||||
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
time = Time.FromString(value)
|
||||
# use explicit test for None here since bool value for epoch date
|
||||
# (1970-01-01) is False
|
||||
if time is None:
|
||||
raise ValueError(f"invalid date format in parameter: {key}")
|
||||
|
||||
return time
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def parseChannelChars(self, keys, allowEmpty=False, useExtChars=False):
|
||||
# channel parameters may be specified as a comma separated list and may
|
||||
# be repeated several times
|
||||
values = None
|
||||
for vList in self.getValues(keys):
|
||||
if values is None:
|
||||
values = []
|
||||
for v in vList.split(","):
|
||||
v = v.strip()
|
||||
if allowEmpty and (v == "--" or len(v) == 0):
|
||||
values.append("--")
|
||||
continue
|
||||
|
||||
if (useExtChars and self.ChannelExtChars(v)) or (
|
||||
not useExtChars and self.ChannelChars(v)
|
||||
):
|
||||
raise ValueError(f"invalid characters in parameter: {keys[0]}")
|
||||
values.append(v)
|
||||
|
||||
return values
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def parseGET(self, args):
|
||||
# transform keys to lower case
|
||||
if args is not None:
|
||||
for k, v in args.items():
|
||||
k = u_str(k.lower())
|
||||
if k not in self.GETParams:
|
||||
raise ValueError(f"invalid param: {k}")
|
||||
|
||||
self._args[k] = [u_str(x) for x in v]
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def parsePOST(self, content):
|
||||
nLine = 0
|
||||
|
||||
for line in content:
|
||||
nLine += 1
|
||||
line = u_str(line.strip())
|
||||
|
||||
# ignore empty and comment lines
|
||||
if len(line) == 0 or line[0] == "#":
|
||||
continue
|
||||
|
||||
# collect parameter (non stream lines)
|
||||
toks = line.split("=", 1)
|
||||
if len(toks) > 1:
|
||||
key = toks[0].strip().lower()
|
||||
|
||||
isPOSTParam = False
|
||||
for p in self.POSTParams:
|
||||
if p == key:
|
||||
if key not in self._args:
|
||||
self._args[key] = []
|
||||
self._args[key].append(toks[1].strip())
|
||||
isPOSTParam = True
|
||||
break
|
||||
|
||||
if isPOSTParam:
|
||||
continue
|
||||
|
||||
# time parameters not allowed in POST header
|
||||
for p in self.TimeParams:
|
||||
if p == key:
|
||||
raise ValueError(
|
||||
f"time parameter in line {nLine} not allowed in POST "
|
||||
"request"
|
||||
)
|
||||
|
||||
# stream parameters not allowed in POST header
|
||||
for p in self.StreamParams:
|
||||
if p == key:
|
||||
raise ValueError(
|
||||
f"stream parameter in line {nLine} not allowed in POST "
|
||||
"request"
|
||||
)
|
||||
|
||||
raise ValueError(f"invalid parameter in line {nLine}")
|
||||
|
||||
# stream parameters
|
||||
toks = line.split()
|
||||
nToks = len(toks)
|
||||
if nToks not in (5, 6):
|
||||
raise ValueError("invalid number of stream components in line {nLine}")
|
||||
|
||||
ro = RequestOptions()
|
||||
|
||||
# net, sta, loc, cha
|
||||
ro.channel = RequestOptions.Channel()
|
||||
ro.channel.net = toks[0].split(",")
|
||||
ro.channel.sta = toks[1].split(",")
|
||||
ro.channel.loc = toks[2].split(",")
|
||||
ro.channel.cha = toks[3].split(",")
|
||||
|
||||
msg = "invalid %s value in line %i"
|
||||
for net in ro.channel.net:
|
||||
if ro.ChannelChars(net):
|
||||
raise ValueError(msg % ("network", nLine))
|
||||
for sta in ro.channel.sta:
|
||||
if ro.ChannelChars(sta):
|
||||
raise ValueError(msg % ("station", nLine))
|
||||
for loc in ro.channel.loc:
|
||||
if loc != "--" and ro.ChannelChars(loc):
|
||||
raise ValueError(msg % ("location", nLine))
|
||||
for cha in ro.channel.cha:
|
||||
if ro.ChannelChars(cha):
|
||||
raise ValueError(msg % ("channel", nLine))
|
||||
|
||||
# start/end time
|
||||
ro.time = RequestOptions.Time()
|
||||
ro.time.start = Time.FromString(toks[4])
|
||||
logEnd = "-"
|
||||
if len(toks) > 5:
|
||||
ro.time.end = Time.FromString(toks[5])
|
||||
logEnd = ro.time.end.iso()
|
||||
|
||||
seiscomp.logging.debug(
|
||||
f"ro: {ro.channel.net}.{ro.channel.sta}.{ro.channel.loc}."
|
||||
f"{ro.channel.cha} {ro.time.start.iso()} {logEnd}"
|
||||
)
|
||||
self.streams.append(ro)
|
||||
|
||||
if not self.streams:
|
||||
raise ValueError("at least one stream line is required")
|
||||
|
||||
|
||||
# vim: ts=4 et
|
936
lib/python/seiscomp/fdsnws/station.py
Normal file
936
lib/python/seiscomp/fdsnws/station.py
Normal file
@ -0,0 +1,936 @@
|
||||
################################################################################
|
||||
# Copyright (C) 2013-2014 gempa GmbH
|
||||
#
|
||||
# FDSNStation -- Implements the fdsnws-station Web service, see
|
||||
# http://www.fdsn.org/webservices/
|
||||
#
|
||||
# Feature notes:
|
||||
# - 'updatedafter' request parameter not implemented: The last modification
|
||||
# time in SeisComP is tracked on the object level. If a child of an object
|
||||
# is updated the update time is not propagated to all parents. In order to
|
||||
# check if a station was updated all children must be evaluated recursively.
|
||||
# This operation would be much to expensive.
|
||||
# - additional request parameters:
|
||||
# - formatted: boolean, default: false
|
||||
# - additional values of request parameters:
|
||||
# - format
|
||||
# - standard: [xml, text]
|
||||
# - additional: [fdsnxml (=xml), stationxml, sc3ml]
|
||||
# - default: xml
|
||||
#
|
||||
# Author: Stephan Herrnkind
|
||||
# Email: herrnkind@gempa.de
|
||||
################################################################################
|
||||
|
||||
from twisted.internet.threads import deferToThread
|
||||
from twisted.web import http, server
|
||||
|
||||
import seiscomp.datamodel
|
||||
import seiscomp.logging
|
||||
from seiscomp.client import Application
|
||||
from seiscomp.core import Time
|
||||
from seiscomp.io import Exporter, ExportObjectList
|
||||
|
||||
from .http import BaseResource
|
||||
from .request import RequestOptions
|
||||
from . import utils
|
||||
|
||||
VERSION = "1.1.6"
|
||||
|
||||
################################################################################
|
||||
|
||||
|
||||
class _StationRequestOptions(RequestOptions):
    """Request options of the fdsnws-station service.

    Extends the generic RequestOptions with the station-specific
    parameters (level, includerestricted, includeavailability,
    updateafter, matchtimeseries and the non-standard formatted) and
    provides inventory iterators which honor the parsed stream, time
    and geographic filters.
    """

    # maps the user-visible output format name to the SeisComP exporter name
    Exporters = {
        "xml": "fdsnxml",
        "fdsnxml": "fdsnxml",
        "stationxml": "staxml",
        "sc3ml": "trunk",
    }
    MinTime = Time(0, 1)

    VText = ["text"]
    # OutputFormats = list(Exporters) + VText
    # Default format must be the first, list(Exporters) has random order
    OutputFormats = ["xml", "fdsnxml", "stationxml", "sc3ml"] + VText

    PLevel = ["level"]
    PIncludeRestricted = ["includerestricted"]
    PIncludeAvailability = ["includeavailability"]
    # NOTE(review): the FDSN specification names this parameter
    # "updatedafter" -- confirm whether "updateafter" is intentional
    PUpdateAfter = ["updateafter"]
    PMatchTimeSeries = ["matchtimeseries"]

    # non standard parameters
    PFormatted = ["formatted"]

    POSTParams = (
        RequestOptions.POSTParams
        + RequestOptions.GeoParams
        + PLevel
        + PIncludeRestricted
        + PIncludeAvailability
        + PUpdateAfter
        + PMatchTimeSeries
        + PFormatted
    )
    GETParams = RequestOptions.GETParams + RequestOptions.WindowTimeParams + POSTParams

    # ---------------------------------------------------------------------------
    def __init__(self):
        super().__init__()

        self.service = "fdsnws-station"

        # detail level flags, derived from the 'level' parameter in parse()
        self.includeSta = True
        self.includeCha = False
        self.includeRes = False

        self.restricted = None
        self.availability = None
        self.updatedAfter = None
        self.matchTimeSeries = None

        # non standard parameters
        self.formatted = None

    # ---------------------------------------------------------------------------
    def parse(self):
        """Parse and validate all station service parameters.

        Raises ValueError (via the parse helpers or raiseValueError) on
        invalid parameter values.
        """
        self.parseTime(True)
        self.parseChannel()
        self.parseGeo()
        self.parseOutput()

        # level: [network, station, channel, response]
        key, value = self.getFirstValue(self.PLevel)
        if value is not None:
            value = value.lower()
            if value in ("network", "net"):
                self.includeSta = False
            elif value in ("channel", "cha", "chan"):
                self.includeCha = True
            elif value in ("response", "res", "resp"):
                self.includeCha = True
                self.includeRes = True
            elif value not in ("station", "sta"):
                self.raiseValueError(key)

        # includeRestricted (optional)
        self.restricted = self.parseBool(self.PIncludeRestricted)

        # includeAvailability (optional)
        self.availability = self.parseBool(self.PIncludeAvailability)

        # updatedAfter (optional), currently not supported
        self.updatedAfter = self.parseTimeStr(self.PUpdateAfter)

        # matchTimeSeries (optional)
        self.matchTimeSeries = self.parseBool(self.PMatchTimeSeries)

        # formatted (optional, non-standard): pretty-print XML output
        self.formatted = self.parseBool(self.PFormatted)

    # ---------------------------------------------------------------------------
    def networkIter(self, inv, matchTime=False):
        """Yield networks of *inv* matching any of the stream filters.

        If *matchTime* is set the network epoch must also overlap the
        requested time window.
        """
        for i in range(inv.networkCount()):
            net = inv.network(i)

            for ro in self.streams:
                # network code
                if ro.channel and not ro.channel.matchNet(net.code()):
                    continue

                # start and end time
                if matchTime and ro.time:
                    try:
                        end = net.end()
                    except ValueError:
                        # open epoch: no end time set
                        end = None
                    if not ro.time.match(net.start(), end):
                        continue

                # one matching stream filter is sufficient
                yield net
                break

    # ---------------------------------------------------------------------------
    def stationIter(self, net, matchTime=False):
        """Yield stations of *net* matching the geographic and stream
        filters (and the time window if *matchTime* is set)."""
        for i in range(net.stationCount()):
            sta = net.station(i)

            # geographic location
            if self.geo:
                try:
                    lat = sta.latitude()
                    lon = sta.longitude()
                except ValueError:
                    # stations without coordinates cannot match a geo filter
                    continue
                if not self.geo.match(lat, lon):
                    continue

            for ro in self.streams:
                # station code
                if ro.channel and (
                    not ro.channel.matchSta(sta.code())
                    or not ro.channel.matchNet(net.code())
                ):
                    continue

                # start and end time
                if matchTime and ro.time:
                    try:
                        end = sta.end()
                    except ValueError:
                        end = None
                    if not ro.time.match(sta.start(), end):
                        continue

                yield sta
                break

    # ---------------------------------------------------------------------------
    def locationIter(self, net, sta, matchTime=False):
        """Yield sensor locations of *sta* matching the stream filters
        (and the time window if *matchTime* is set)."""
        for i in range(sta.sensorLocationCount()):
            loc = sta.sensorLocation(i)

            for ro in self.streams:
                # location code
                if ro.channel and (
                    not ro.channel.matchLoc(loc.code())
                    or not ro.channel.matchSta(sta.code())
                    or not ro.channel.matchNet(net.code())
                ):
                    continue

                # start and end time
                if matchTime and ro.time:
                    try:
                        end = loc.end()
                    except ValueError:
                        end = None
                    if not ro.time.match(loc.start(), end):
                        continue

                yield loc
                break

    # ---------------------------------------------------------------------------
    def streamIter(self, net, sta, loc, matchTime, dac):
        """Yield streams of *loc* matching the stream filters.

        If *dac* (data availability cache) is given and matchtimeseries
        was requested, the stream must additionally have a data extent
        overlapping the requested time window.
        """
        for i in range(loc.streamCount()):
            stream = loc.stream(i)

            for ro in self.streams:
                # stream code
                if ro.channel and (
                    not ro.channel.matchCha(stream.code())
                    or not ro.channel.matchLoc(loc.code())
                    or not ro.channel.matchSta(sta.code())
                    or not ro.channel.matchNet(net.code())
                ):
                    continue

                # start and end time
                if matchTime and ro.time:
                    try:
                        end = stream.end()
                    except ValueError:
                        end = None
                    if not ro.time.match(stream.start(), end):
                        continue

                # match data availability extent
                if dac is not None and self.matchTimeSeries:
                    extent = dac.extent(
                        net.code(), sta.code(), loc.code(), stream.code()
                    )
                    if extent is None or (
                        ro.time and not ro.time.match(extent.start(), extent.end())
                    ):
                        continue

                yield stream
                break
|
||||
|
||||
|
||||
################################################################################
|
||||
class FDSNStation(BaseResource):
|
||||
isLeaf = True
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
    def __init__(
        self,
        inv,
        restricted,
        maxObj,
        daEnabled,
        conditionalRequestsEnabled,
        timeInventoryLoaded,
    ):
        """Create the station resource.

        inv                        -- inventory served by this resource
        restricted                 -- whether restricted objects may be returned
        maxObj                     -- maximum number of objects per response
        daEnabled                  -- data availability support enabled
        conditionalRequestsEnabled -- honor If-Modified-Since requests
        timeInventoryLoaded        -- load time of the inventory (Core.Time)
        """
        super().__init__(VERSION)

        self._inv = inv
        self._allowRestricted = restricted
        self._maxObj = maxObj
        self._daEnabled = daEnabled
        self._conditionalRequestsEnabled = conditionalRequestsEnabled
        # stored as seconds for comparison with parsed HTTP date headers
        self._timeInventoryLoaded = timeInventoryLoaded.seconds()

        # additional object count dependent on detail level: all response
        # objects plus all datalogger decimations
        self._resLevelCount = (
            inv.responsePAZCount()
            + inv.responseFIRCount()
            + inv.responsePolynomialCount()
            + inv.responseIIRCount()
            + inv.responseFAPCount()
        )
        for i in range(inv.dataloggerCount()):
            self._resLevelCount += inv.datalogger(i).decimationCount()
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def render_OPTIONS(self, req):
|
||||
req.setHeader("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
|
||||
req.setHeader(
|
||||
"Access-Control-Allow-Headers",
|
||||
"Accept, Content-Type, X-Requested-With, Origin",
|
||||
)
|
||||
req.setHeader("Content-Type", "text/plain; charset=utf-8")
|
||||
return ""
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
    def render_GET(self, req):
        """Handle an HTTP GET request: parse query parameters and dispatch.

        Returns an error page on invalid parameters, otherwise the value
        of _prepareRequest (typically server.NOT_DONE_YET).
        """
        # Parse and validate GET parameters
        ro = _StationRequestOptions()
        try:
            ro.parseGET(req.args)
            ro.parse()
            # the GET operation supports exactly one stream filter
            ro.streams.append(ro)
        except ValueError as e:
            seiscomp.logging.warning(str(e))
            return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

        return self._prepareRequest(req, ro)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def render_POST(self, req):
|
||||
# Parse and validate POST parameters
|
||||
ro = _StationRequestOptions()
|
||||
try:
|
||||
ro.parsePOST(req.content)
|
||||
ro.parse()
|
||||
except ValueError as e:
|
||||
seiscomp.logging.warning(str(e))
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
|
||||
|
||||
return self._prepareRequest(req, ro)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def _prepareRequest(self, req, ro):
|
||||
if ro.availability and not self._daEnabled:
|
||||
msg = "including of availability information not supported"
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
|
||||
|
||||
if ro.updatedAfter:
|
||||
msg = "filtering based on update time not supported"
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
|
||||
|
||||
if ro.matchTimeSeries and not self._daEnabled:
|
||||
msg = "filtering based on available time series not supported"
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
|
||||
|
||||
# load data availability if requested
|
||||
dac = None
|
||||
if ro.availability or ro.matchTimeSeries:
|
||||
dac = Application.Instance().getDACache()
|
||||
if dac is None or len(dac.extents()) == 0:
|
||||
msg = "no data availabiltiy extent information found"
|
||||
return self.renderErrorPage(req, http.NO_CONTENT, msg, ro)
|
||||
|
||||
# Exporter, 'None' is used for text output
|
||||
if ro.format in ro.VText:
|
||||
if ro.includeRes:
|
||||
msg = "response level output not available in text format"
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
|
||||
req.setHeader("Content-Type", "text/plain; charset=utf-8")
|
||||
d = deferToThread(self._processRequestText, req, ro, dac)
|
||||
else:
|
||||
exp = Exporter.Create(ro.Exporters[ro.format])
|
||||
if exp is None:
|
||||
msg = (
|
||||
f"output format '{ro.format}' no available, export module "
|
||||
f"'{ro.Exporters[ro.format]}' could not be loaded."
|
||||
)
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
|
||||
|
||||
req.setHeader("Content-Type", "application/xml; charset=utf-8")
|
||||
exp.setFormattedOutput(bool(ro.formatted))
|
||||
d = deferToThread(self._processRequestExp, req, ro, exp, dac)
|
||||
|
||||
req.notifyFinish().addErrback(utils.onCancel, d)
|
||||
d.addBoth(utils.onFinish, req)
|
||||
|
||||
# The request is handled by the deferred object
|
||||
return server.NOT_DONE_YET
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
    def _processRequestExp(self, req, ro, exp, dac):
        """Build and export the filtered inventory as XML.

        Runs in a worker thread.  req is the twisted request, ro the
        parsed request options, exp the SeisComP exporter instance and
        dac the (optional) data availability cache.  Returns True when a
        response (including error pages) was written, False when the
        request was aborted (disconnect, object limit, export error).
        """
        if req._disconnected:  # pylint: disable=W0212
            return False

        staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0

        # registration is not needed for the throw-away copy objects below
        seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
        newInv = seiscomp.datamodel.Inventory()
        dataloggers, sensors, extents = set(), set(), {}

        # restricted objects are skipped unless the service allows them
        # and the client did not explicitly exclude them
        skipRestricted = not self._allowRestricted or (
            ro.restricted is not None and not ro.restricted
        )
        levelNet = not ro.includeSta
        levelSta = ro.includeSta and not ro.includeCha

        isConditionalRequest = self._isConditionalRequest(req)

        # iterate over inventory networks
        for net in ro.networkIter(self._inv, levelNet):
            if req._disconnected:  # pylint: disable=W0212
                return False
            if skipRestricted and utils.isRestricted(net):
                continue
            newNet = seiscomp.datamodel.Network(net)

            # Copy comments
            for i in range(net.commentCount()):
                newNet.add(seiscomp.datamodel.Comment(net.comment(i)))

            # iterate over inventory stations of current network
            for sta in ro.stationIter(net, levelSta):
                if req._disconnected:  # pylint: disable=W0212
                    return False
                if skipRestricted and utils.isRestricted(sta):
                    continue
                if not self.checkObjects(req, objCount, self._maxObj):
                    return False

                if ro.includeCha:
                    numCha, numLoc, d, s, e = self._processStation(
                        newNet, net, sta, ro, dac, skipRestricted, isConditionalRequest
                    )
                    if numCha > 0:
                        # a conditional request is answered as soon as
                        # one matching channel is found
                        if isConditionalRequest:
                            self.returnNotModified(req, ro)
                            return True
                        locCount += numLoc
                        chaCount += numCha
                        extCount += len(e)
                        # NOTE(review): adds the accumulated extCount (not
                        # len(e)) on every iteration -- confirm intended
                        objCount += numLoc + numCha + extCount
                        if not self.checkObjects(req, objCount, self._maxObj):
                            return False
                        dataloggers |= d
                        sensors |= s
                        for k, v in e.items():
                            if k not in extents:
                                extents[k] = v
                elif self._matchStation(net, sta, ro, dac):
                    if isConditionalRequest:
                        self.returnNotModified(req, ro)
                        return True
                    if ro.includeSta:
                        newSta = seiscomp.datamodel.Station(sta)
                        # Copy comments
                        for i in range(sta.commentCount()):
                            newSta.add(seiscomp.datamodel.Comment(sta.comment(i)))
                        newNet.add(newSta)
                    else:
                        # no station output requested: one matching station
                        # is sufficient to include the network
                        newInv.add(newNet)
                        objCount += 1
                        break

            if newNet.stationCount() > 0:
                newInv.add(newNet)
                staCount += newNet.stationCount()
                # NOTE(review): adds the accumulated staCount (not the per
                # network count) -- confirm intended
                objCount += staCount + 1

        # Return 204 if no matching inventory was found
        if newInv.networkCount() == 0:
            msg = "no matching inventory found"
            self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
            return True

        if self._conditionalRequestsEnabled:
            req.setHeader(
                "Last-Modified", http.datetimeToString(self._timeInventoryLoaded)
            )

        # Copy references (dataloggers, responses, sensors)
        decCount, resCount = 0, 0
        if ro.includeCha:
            decCount = self._copyReferences(
                newInv, req, objCount, self._inv, ro, dataloggers, sensors, self._maxObj
            )
            # None signals abort (disconnect or object limit exceeded)
            if decCount is None:
                return False

            resCount = (
                newInv.responsePAZCount()
                + newInv.responseFIRCount()
                + newInv.responsePolynomialCount()
                + newInv.responseFAPCount()
                + newInv.responseIIRCount()
            )
            objCount += (
                resCount + decCount + newInv.dataloggerCount() + newInv.sensorCount()
            )

        # Copy data extents
        objOut = newInv
        if len(extents) > 0:
            objCount += 1
            da = seiscomp.datamodel.DataAvailability()
            for k, v in extents.items():
                objCount += 1
                da.add(seiscomp.datamodel.DataExtent(v))
            # export inventory and availability as one object list
            objOut = ExportObjectList()
            objOut.append(newInv)
            objOut.append(da)

        sink = utils.Sink(req)
        if not exp.write(sink, objOut):
            return False

        seiscomp.logging.debug(
            f"{ro.service}: returned {newInv.networkCount()}Net, {staCount}Sta, "
            f"{locCount}Loc, {chaCount}Cha, {newInv.dataloggerCount()}DL, "
            f"{decCount}Dec, {newInv.sensorCount()}Sen, {resCount}Res, {extCount}DAExt "
            f"(total objects/bytes: {objCount}/{sink.written})"
        )
        utils.accessLog(req, ro, http.OK, sink.written, None)
        return True
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
@staticmethod
|
||||
def _formatEpoch(obj):
|
||||
df = "%FT%T"
|
||||
dfMS = "%FT%T.%f"
|
||||
|
||||
if obj.start().microseconds() > 0:
|
||||
start = obj.start().toString(dfMS)
|
||||
else:
|
||||
start = obj.start().toString(df)
|
||||
|
||||
try:
|
||||
if obj.end().microseconds() > 0:
|
||||
end = obj.end().toString(dfMS)
|
||||
else:
|
||||
end = obj.end().toString(df)
|
||||
except ValueError:
|
||||
end = ""
|
||||
|
||||
return start, end
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
    def _processRequestText(self, req, ro, dac):
        """Build and write the filtered inventory in FDSN text format.

        Runs in a worker thread.  The column layout depends on the
        requested level (network, station or channel); response level is
        rejected earlier in _prepareRequest.  Returns True when output
        was written, False on disconnect or when nothing matched.
        """
        if req._disconnected:  # pylint: disable=W0212
            return False

        skipRestricted = not self._allowRestricted or (
            ro.restricted is not None and not ro.restricted
        )
        isConditionalRequest = self._isConditionalRequest(req)

        data = ""
        # list of (sort key, output line) tuples, sorted before output
        lines = []

        # level = network
        if not ro.includeSta:
            data = "#Network|Description|StartTime|EndTime|TotalStations\n"

            # iterate over inventory networks
            for net in ro.networkIter(self._inv, True):
                if req._disconnected:  # pylint: disable=W0212
                    return False
                if skipRestricted and utils.isRestricted(net):
                    continue

                # at least one matching station is required
                stationFound = False
                for sta in ro.stationIter(net, False):
                    if req._disconnected:  # pylint: disable=W0212
                        return False
                    if self._matchStation(net, sta, ro, dac) and not (
                        skipRestricted and utils.isRestricted(sta)
                    ):
                        stationFound = True
                        break
                if not stationFound:
                    continue
                if isConditionalRequest:
                    self.returnNotModified(req, ro)
                    return True

                start, end = self._formatEpoch(net)
                lines.append(
                    (
                        f"{net.code()} {start}",
                        f"{net.code()}|{net.description()}|{start}|{end}|"
                        f"{net.stationCount()}\n",
                    )
                )

        # level = station
        elif not ro.includeCha:
            data = (
                "#Network|Station|Latitude|Longitude|Elevation|"
                "SiteName|StartTime|EndTime\n"
            )

            # iterate over inventory networks
            for net in ro.networkIter(self._inv, False):
                if req._disconnected:  # pylint: disable=W0212
                    return False
                if skipRestricted and utils.isRestricted(net):
                    continue
                # iterate over inventory stations
                for sta in ro.stationIter(net, True):
                    if req._disconnected:  # pylint: disable=W0212
                        return False
                    if not self._matchStation(net, sta, ro, dac) or (
                        skipRestricted and utils.isRestricted(sta)
                    ):
                        continue
                    if isConditionalRequest:
                        self.returnNotModified(req, ro)
                        return True

                    # optional attributes: empty column when not set
                    try:
                        lat = str(sta.latitude())
                    except ValueError:
                        lat = ""
                    try:
                        lon = str(sta.longitude())
                    except ValueError:
                        lon = ""
                    try:
                        elev = str(sta.elevation())
                    except ValueError:
                        elev = ""
                    try:
                        desc = sta.description()
                    except ValueError:
                        desc = ""

                    start, end = self._formatEpoch(sta)
                    lines.append(
                        (
                            f"{net.code()}.{sta.code()} {start}",
                            f"{net.code()}|{sta.code()}|{lat}|{lon}|{elev}|{desc}|"
                            f"{start}|{end}\n",
                        )
                    )

        # level = channel (response level not supported in text format)
        else:
            data = (
                "#Network|Station|Location|Channel|Latitude|Longitude|"
                "Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|"
                "ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime\n"
            )

            # iterate over inventory networks
            for net in ro.networkIter(self._inv, False):
                if req._disconnected:  # pylint: disable=W0212
                    return False
                if skipRestricted and utils.isRestricted(net):
                    continue
                # iterate over inventory stations, locations, streams
                for sta in ro.stationIter(net, False):
                    if req._disconnected:  # pylint: disable=W0212
                        return False
                    if skipRestricted and utils.isRestricted(sta):
                        continue
                    for loc in ro.locationIter(net, sta, True):
                        for stream in ro.streamIter(net, sta, loc, True, dac):
                            if skipRestricted and utils.isRestricted(stream):
                                continue
                            if isConditionalRequest:
                                self.returnNotModified(req, ro)
                                return True

                            # optional attributes: empty column when not set
                            try:
                                lat = str(loc.latitude())
                            except ValueError:
                                lat = ""
                            try:
                                lon = str(loc.longitude())
                            except ValueError:
                                lon = ""
                            try:
                                elev = str(loc.elevation())
                            except ValueError:
                                elev = ""
                            try:
                                depth = str(stream.depth())
                            except ValueError:
                                depth = ""
                            try:
                                azi = str(stream.azimuth())
                            except ValueError:
                                azi = ""
                            try:
                                dip = str(stream.dip())
                            except ValueError:
                                dip = ""

                            desc = ""
                            try:
                                sensor = self._inv.findSensor(stream.sensor())
                                if sensor is not None:
                                    desc = sensor.description()
                            except ValueError:
                                pass

                            try:
                                scale = str(stream.gain())
                            except ValueError:
                                scale = ""
                            try:
                                scaleFreq = str(stream.gainFrequency())
                            except ValueError:
                                scaleFreq = ""
                            try:
                                scaleUnit = str(stream.gainUnit())
                            except ValueError:
                                scaleUnit = ""
                            try:
                                sr = str(
                                    stream.sampleRateNumerator()
                                    / stream.sampleRateDenominator()
                                )
                            except (ValueError, ZeroDivisionError):
                                sr = ""

                            start, end = self._formatEpoch(stream)
                            lines.append(
                                (
                                    f"{net.code()}.{sta.code()}.{loc.code()}."
                                    f"{stream.code()} {start}",
                                    f"{net.code()}|{sta.code()}|{loc.code()}|"
                                    f"{stream.code()}|{lat}|{lon}|{elev}|{depth}|{azi}|"
                                    f"{dip}|{desc}|{scale}|{scaleFreq}|{scaleUnit}|"
                                    f"{sr}|{start}|{end}\n",
                                )
                            )

        # sort lines and append to final data string
        lines.sort(key=lambda line: line[0])
        for line in lines:
            data += line[1]

        # Return 204 if no matching inventory was found
        if len(lines) == 0:
            msg = "no matching inventory found"
            self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
            return False

        if self._conditionalRequestsEnabled:
            req.setHeader(
                "Last-Modified", http.datetimeToString(self._timeInventoryLoaded)
            )

        dataBin = utils.b_str(data)
        utils.writeTSBin(req, dataBin)
        seiscomp.logging.debug(
            f"{ro.service}: returned {len(lines)} lines (total bytes: {len(dataBin)})"
        )
        utils.accessLog(req, ro, http.OK, len(dataBin), None)
        return True
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
    def _isConditionalRequest(self, req):
        """Return True if *req* is a time-based conditional request that
        can be answered with 304 Not Modified.

        Only GET/HEAD requests carrying an If-Modified-Since header (and
        no If-None-Match header) qualify, and only when the inventory
        was loaded at or before the given time.
        """
        # support for time based conditional requests
        if not self._conditionalRequestsEnabled:
            return False
        if req.method not in (b"GET", b"HEAD"):
            return False
        # entity-tag validation is not supported
        if req.getHeader("If-None-Match") is not None:
            return False

        modifiedSince = req.getHeader("If-Modified-Since")
        if not modifiedSince:
            return False

        # unparsable dates yield a falsy value -> not conditional
        modifiedSince = utils.stringToDatetime(modifiedSince)
        return modifiedSince and self._timeInventoryLoaded <= modifiedSince
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Checks if at least one location and channel combination matches the
|
||||
# request options
|
||||
    @staticmethod
    def _matchStation(net, sta, ro, dac):
        """Return True if at least one location/channel combination of
        *sta* matches the request options (and, if *dac* is set, has a
        data availability extent)."""
        # No filter: return true immediately
        if dac is None and (
            not ro.channel or (not ro.channel.loc and not ro.channel.cha)
        ):
            return True

        for loc in ro.locationIter(net, sta, False):
            # a matching location is sufficient if neither channel code
            # nor time window nor availability needs to be checked
            if dac is None and not ro.channel.cha and not ro.time:
                return True

            # otherwise one matching stream is required
            for _ in ro.streamIter(net, sta, loc, False, dac):
                return True

        return False
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Adds a deep copy of the specified station to the new network if the
|
||||
# location and channel combination matches the request options (if any)
|
||||
    @staticmethod
    def _processStation(
        newNet, net, sta, ro, dac, skipRestricted, isConditionalRequest
    ):
        """Add a deep copy of *sta* (with matching locations/streams) to
        *newNet*.

        Returns a 5-tuple (chaCount, locCount, dataloggers, sensors,
        extents).  On a conditional request the first matching stream
        short-circuits with counts of 1.
        NOTE(review): the early/empty returns use lists where the
        success path returns two sets and a dict; callers return early
        on the conditional path, so the mismatch appears harmless --
        confirm.
        """
        chaCount = 0
        dataloggers, sensors, extents = set(), set(), {}
        newSta = seiscomp.datamodel.Station(sta)
        includeAvailability = dac is not None and ro.availability

        # Copy comments
        for i in range(sta.commentCount()):
            newSta.add(seiscomp.datamodel.Comment(sta.comment(i)))

        for loc in ro.locationIter(net, sta, True):
            newLoc = seiscomp.datamodel.SensorLocation(loc)
            # Copy comments
            for i in range(loc.commentCount()):
                newLoc.add(seiscomp.datamodel.Comment(loc.comment(i)))

            for stream in ro.streamIter(net, sta, loc, True, dac):
                if skipRestricted and utils.isRestricted(stream):
                    continue
                # conditional request: one match is enough to decide
                if isConditionalRequest:
                    return 1, 1, [], [], []
                newCha = seiscomp.datamodel.Stream(stream)
                # Copy comments
                for i in range(stream.commentCount()):
                    newCha.add(seiscomp.datamodel.Comment(stream.comment(i)))
                newLoc.add(newCha)
                # collect referenced instruments for later deep copy
                dataloggers.add(stream.datalogger())
                sensors.add(stream.sensor())
                if includeAvailability:
                    ext = dac.extent(net.code(), sta.code(), loc.code(), stream.code())
                    if ext is not None and ext.publicID() not in extents:
                        extents[ext.publicID()] = ext

            if newLoc.streamCount() > 0:
                newSta.add(newLoc)
                chaCount += newLoc.streamCount()

        if newSta.sensorLocationCount() > 0:
            newNet.add(newSta)
            return chaCount, newSta.sensorLocationCount(), dataloggers, sensors, extents

        return 0, 0, [], [], []
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Copy references (data loggers, sensors, responses) depended on request
|
||||
# options
|
||||
    def _copyReferences(
        self, newInv, req, objCount, inv, ro, dataloggers, sensors, maxObj
    ):
        """Copy referenced dataloggers, sensors and (optionally)
        responses from *inv* into *newInv*.

        dataloggers/sensors are sets of public IDs collected while
        processing the streams.  Returns the number of copied
        decimations, or None when the request was aborted (client
        disconnect or object limit exceeded).
        """
        responses = set()
        decCount = 0

        # datalogger
        for i in range(inv.dataloggerCount()):
            if req._disconnected:  # pylint: disable=W0212
                return None
            logger = inv.datalogger(i)
            if logger.publicID() not in dataloggers:
                continue
            newLogger = seiscomp.datamodel.Datalogger(logger)
            newInv.add(newLogger)
            # decimations are only needed for responses
            if ro.includeRes:
                for j in range(logger.decimationCount()):
                    decimation = logger.decimation(j)
                    newLogger.add(seiscomp.datamodel.Decimation(decimation))

                    # collect response ids from the filter chains; an
                    # unset chain raises ValueError and is skipped
                    filterStr = ""
                    try:
                        filterStr = f"{decimation.analogueFilterChain().content()} "
                    except ValueError:
                        pass
                    try:
                        filterStr += decimation.digitalFilterChain().content()
                    except ValueError:
                        pass
                    for resp in filterStr.split():
                        responses.add(resp)
                decCount += newLogger.decimationCount()

        objCount += newInv.dataloggerCount() + decCount
        resCount = len(responses)
        if not self.checkObjects(req, objCount + resCount, maxObj):
            return None

        # sensor
        for i in range(inv.sensorCount()):
            if req._disconnected:  # pylint: disable=W0212
                return None
            sensor = inv.sensor(i)
            if sensor.publicID() not in sensors:
                continue
            newSensor = seiscomp.datamodel.Sensor(sensor)
            newInv.add(newSensor)
            resp = newSensor.response()
            if resp:
                if ro.includeRes:
                    responses.add(resp)
                else:
                    # no responses: remove response reference to avoid missing
                    # response warning of exporter
                    newSensor.setResponse("")

        objCount += newInv.sensorCount()
        resCount = len(responses)
        if not self.checkObjects(req, objCount + resCount, maxObj):
            return None

        # responses: copy every response type whose public ID was
        # referenced by a decimation filter chain or a sensor
        if ro.includeRes:
            if req._disconnected:  # pylint: disable=W0212
                return None
            for i in range(inv.responsePAZCount()):
                resp = inv.responsePAZ(i)
                if resp.publicID() in responses:
                    newInv.add(seiscomp.datamodel.ResponsePAZ(resp))
            if req._disconnected:  # pylint: disable=W0212
                return None
            for i in range(inv.responseFIRCount()):
                resp = inv.responseFIR(i)
                if resp.publicID() in responses:
                    newInv.add(seiscomp.datamodel.ResponseFIR(resp))
            if req._disconnected:  # pylint: disable=W0212
                return None
            for i in range(inv.responsePolynomialCount()):
                resp = inv.responsePolynomial(i)
                if resp.publicID() in responses:
                    newInv.add(seiscomp.datamodel.ResponsePolynomial(resp))
            if req._disconnected:  # pylint: disable=W0212
                return None
            for i in range(inv.responseFAPCount()):
                resp = inv.responseFAP(i)
                if resp.publicID() in responses:
                    newInv.add(seiscomp.datamodel.ResponseFAP(resp))
            if req._disconnected:  # pylint: disable=W0212
                return None
            for i in range(inv.responseIIRCount()):
                resp = inv.responseIIR(i)
                if resp.publicID() in responses:
                    newInv.add(seiscomp.datamodel.ResponseIIR(resp))

        return decCount
|
||||
|
||||
|
||||
# vim: ts=4 et
|
201
lib/python/seiscomp/fdsnws/utils.py
Normal file
201
lib/python/seiscomp/fdsnws/utils.py
Normal file
@ -0,0 +1,201 @@
|
||||
################################################################################
|
||||
# Copyright (C) 2013-2014 gempa GmbH
|
||||
#
|
||||
# Common utility functions
|
||||
#
|
||||
# Author: Stephan Herrnkind
|
||||
# Email: herrnkind@gempa.de
|
||||
################################################################################
|
||||
|
||||
import socket
|
||||
import traceback
|
||||
|
||||
import twisted
|
||||
|
||||
from twisted.internet import reactor, defer
|
||||
from twisted.python.failure import Failure
|
||||
from twisted.web import http
|
||||
|
||||
|
||||
import seiscomp.logging
|
||||
import seiscomp.core
|
||||
import seiscomp.io
|
||||
from seiscomp.client import Application
|
||||
|
||||
twisted_version = (twisted.version.major, twisted.version.minor, twisted.version.micro)
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Converts a unicode string to a byte string
|
||||
def b_str(unicode_string):
    """Encode *unicode_string* to UTF-8 bytes, replacing unencodable
    characters instead of raising."""
    return unicode_string.encode(encoding="utf-8", errors="replace")
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Converts a byte string to a unicode string
|
||||
def u_str(byte_string):
    """Decode UTF-8 *byte_string* to str, replacing invalid sequences
    with U+FFFD instead of raising."""
    return byte_string.decode(encoding="utf-8", errors="replace")
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Tests if a SC3 inventory object is restricted
|
||||
def isRestricted(obj):
    """Return the restricted flag of a SC3 inventory object.

    Objects whose flag is not set raise ValueError from the getter;
    those are treated as unrestricted (False).
    """
    try:
        restricted = obj.restricted()
    except ValueError:
        return False
    return restricted
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Thread-safe write of string data using reactor main thread
|
||||
def writeTS(req, data):
    """Thread-safe write of a text string to *req*.

    Encodes *data* to UTF-8 and schedules the write on the reactor main
    thread, as twisted request objects must not be touched from worker
    threads.
    """
    reactor.callFromThread(req.write, b_str(data))
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Thread-safe write of binary data using reactor main thread
|
||||
def writeTSBin(req, data):
    """Thread-safe write of binary *data* to *req* via the reactor main
    thread."""
    reactor.callFromThread(req.write, data)
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Finish requests deferred to threads
|
||||
def onFinish(result, req):
    """Deferred callback finishing a request served in a worker thread.

    *result* is either the worker's return value or a twisted Failure.
    """
    seiscomp.logging.debug(f"finish value = {str(result)}")
    if isinstance(result, Failure):
        err = result.value
        if isinstance(err, defer.CancelledError):
            # Canceled requests return early: req.finish() is NOT called
            # since the connection is already gone.
            seiscomp.logging.error("request canceled")
            return
        seiscomp.logging.error(
            f"{result.getErrorMessage()} "
            f"{traceback.format_tb(result.getTracebackObject())}"
        )
    else:
        # Truthy result means the worker completed successfully.
        if result:
            seiscomp.logging.debug("request successfully served")
        else:
            seiscomp.logging.debug("request failed")

    reactor.callFromThread(req.finish)
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Handle connection errors
|
||||
def onCancel(failure, req):
    """Errback handling connection errors: log the failure, cancel *req*."""
    if failure:
        seiscomp.logging.error(
            f"{failure.getErrorMessage()} "
            f"{traceback.format_tb(failure.getTracebackObject())}"
        )
    else:
        seiscomp.logging.error("request canceled")
    req.cancel()
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Handle premature connection reset
|
||||
def onResponseFailure(_, call):
    # Handle premature connection reset: cancel the deferred worker call.
    seiscomp.logging.error("response canceled")
    call.cancel()
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Writes one entry for a served request to the application's access log
# (no-op if access logging is not configured)
|
||||
def accessLog(req, ro, code, length, err):
    """Log one served request to the application's access log.

    Does nothing when access logging is disabled (logger is None).
    """
    # Private member access is intentional: the log lives on the
    # application singleton.
    logger = Application.Instance()._accessLog  # pylint: disable=W0212
    if logger is None:
        return

    logger.log(AccessLogEntry(req, ro, code, length, err))
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Compability function for stringToDatetime() change in Twisted 24.7, see
|
||||
# https://github.com/twisted/twisted/commit/731e370dfc5d2f7224dc1e12931ddf5c51b211a6
|
||||
def stringToDatetime(dateString):
    """Compatibility wrapper for twisted.web.http.stringToDatetime().

    Twisted changed the expected argument type in 24.7, see
    https://github.com/twisted/twisted/commit/731e370dfc5d2f7224dc1e12931ddf5c51b211a6
    """
    if twisted_version < (24, 7):
        return http.stringToDatetime(dateString)

    # Since version 24.7 the argument needs to be a byte string
    return http.stringToDatetime(dateString.encode("ascii"))
|
||||
|
||||
|
||||
################################################################################
|
||||
class Sink(seiscomp.io.ExportSink):
    """Export sink streaming exporter output into a twisted HTTP request."""

    def __init__(self, request):
        super().__init__()

        self.request = request
        # total number of bytes written so far
        self.written = 0

    def write(self, data):
        # Abort with -1 (error) once the client has disconnected; private
        # member access on the request is intentional.
        if self.request._disconnected:  # pylint: disable=W0212
            return -1

        # Hand the chunk to the reactor thread and account for it.
        writeTSBin(self.request, data)
        self.written += len(data)
        return len(data)
|
||||
|
||||
|
||||
################################################################################
|
||||
class AccessLogEntry:
    """One '|'-separated access log line for a served request.

    The line is assembled in __init__ except for the reverse-resolved user
    host name, which is looked up lazily in __str__.
    """

    def __init__(self, req, ro, code, length, err):
        # user agent, truncated and with the field separator removed
        agent = req.getHeader("User-Agent")
        if agent is None:
            agent = ""
        else:
            agent = agent[:100].replace("|", " ")

        if err is None:
            err = ""

        service, user, accessTime, procTime = "", "", "", 0
        net, sta, loc, cha = "", "", "", ""
        if ro is not None:
            # processing time in milliseconds
            procTime = int((seiscomp.core.Time.GMT() - ro.accessTime).length() * 1000.0)

            service = ro.service
            if ro.userName is not None:
                user = ro.userName
            accessTime = str(ro.accessTime)

            # requested channel constraints, comma-joined per field
            if ro.channel is not None:
                if ro.channel.net is not None:
                    net = ",".join(ro.channel.net)
                if ro.channel.sta is not None:
                    sta = ",".join(ro.channel.sta)
                if ro.channel.loc is not None:
                    loc = ",".join(ro.channel.loc)
                if ro.channel.cha is not None:
                    cha = ",".join(ro.channel.cha)

        # The host name of the client is resolved in the __str__ method by the
        # logging thread so that a long running DNS reverse lookup may not slow
        # down the request
        self.msgPrefix = f"{service}|{u_str(req.getRequestHostname())}|{accessTime}|"

        # Prefer the first X-Forwarded-For hop as user IP when present
        # (request passed through a proxy).
        xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
        if xff:
            self.userIP = xff[0].split(",")[0].strip()
        else:
            self.userIP = req.getClientIP()

        self.clientIP = req.getClientIP()
        self.msgSuffix = (
            f"|{self.clientIP}|{length}|{procTime}|{err}|{agent}|{code}|{user}|{net}"
            f"|{sta}|{loc}|{cha}||"
        )

    def __str__(self):
        # Reverse-resolve the user IP here (logging thread); fall back to
        # the raw IP if the lookup fails.
        try:
            userHost = socket.gethostbyaddr(self.userIP)[0]
        except socket.herror:
            userHost = self.userIP
        return self.msgPrefix + userHost + self.msgSuffix
|
||||
|
||||
|
||||
# vim: ts=4 et
|
1275
lib/python/seiscomp/geo.py
Normal file
1275
lib/python/seiscomp/geo.py
Normal file
File diff suppressed because it is too large
Load Diff
2532
lib/python/seiscomp/io.py
Normal file
2532
lib/python/seiscomp/io.py
Normal file
File diff suppressed because it is too large
Load Diff
386
lib/python/seiscomp/kernel.py
Normal file
386
lib/python/seiscomp/kernel.py
Normal file
@ -0,0 +1,386 @@
|
||||
############################################################################
|
||||
# Copyright (C) by gempa GmbH, GFZ Potsdam #
|
||||
# #
|
||||
# You can redistribute and/or modify this program under the #
|
||||
# terms of the SeisComP Public License. #
|
||||
# #
|
||||
# This program is distributed in the hope that it will be useful, #
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
|
||||
# SeisComP Public License for more details. #
|
||||
############################################################################
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import string
|
||||
import subprocess
|
||||
import seiscomp.config
|
||||
|
||||
|
||||
class Template(string.Template):
    # Additionally allow dots in placeholder names (e.g. ${module.param})
    # on top of the default identifier characters.
    idpattern = r'[_a-z][_a-z0-9.]*'
|
||||
|
||||
|
||||
class Environment(seiscomp.config.Config):
|
||||
def __init__(self, rootPath):
    """Set up the SeisComP directory layout and process environment.

    rootPath: installation root (SEISCOMP_ROOT); exported to the OS
    environment and used to derive all other directories.
    """
    seiscomp.config.Config.__init__(self)
    self.SEISCOMP_ROOT = rootPath
    # Fall back to the current directory when HOME is unset.
    self.home_dir = os.environ.get("HOME", ".")
    # Per-user configuration directory, overridable via
    # SEISCOMP_LOCAL_CONFIG.
    self.local_config_dir = os.environ.get(
        "SEISCOMP_LOCAL_CONFIG", os.path.join(self.home_dir, ".seiscomp"))

    self.root = rootPath
    self.bin_dir = os.path.join(self.root, "bin")
    self.data_dir = os.path.join(self.root, "share")
    self.etc_dir = os.path.join(self.root, "etc")
    self.etc_defaults_dir = os.path.join(self.root, "etc", "defaults")
    self.descriptions_dir = os.path.join(self.root, "etc", "descriptions")
    self.key_dir = os.path.join(self.root, "etc", "key")
    self.var_dir = os.path.join(self.root, "var")
    self.log_dir = os.path.join(self.local_config_dir, "log")
    self.cwd = None
    self.last_template_file = None

    self._csv = False
    self._readConfig()

    os.environ["SEISCOMP_ROOT"] = self.SEISCOMP_ROOT

    # Extend LD_LIBRARY_PATH, PATH and PYTHONPATH so child processes find
    # SeisComP binaries and libraries. SeisComP bin/sbin are prepended to
    # PATH; the library and python paths are appended.
    LD_LIBRARY_PATH = os.path.join(self.SEISCOMP_ROOT, "lib")
    BIN_PATH = os.path.join(self.SEISCOMP_ROOT, "bin")
    SBIN_PATH = os.path.join(self.SEISCOMP_ROOT, "sbin")
    PATH = BIN_PATH + ":" + SBIN_PATH
    PYTHONPATH = os.path.join(self.SEISCOMP_ROOT, "lib", "python")
    if "LD_LIBRARY_PATH" in os.environ:
        LD_LIBRARY_PATH = os.environ["LD_LIBRARY_PATH"] + ":" + LD_LIBRARY_PATH
    os.environ["LD_LIBRARY_PATH"] = LD_LIBRARY_PATH
    if "PATH" in os.environ:
        PATH = PATH + ":" + os.environ["PATH"]
    os.environ["PATH"] = PATH
    if "PYTHONPATH" in os.environ:
        PYTHONPATH = os.environ["PYTHONPATH"] + ":" + PYTHONPATH
    os.environ["PYTHONPATH"] = PYTHONPATH

    # Create required runtime directories (best effort: failures such as
    # missing permissions surface on first use instead).
    for subdir in ("log", "run"):
        try:
            os.makedirs(os.path.join(self.root, "var", subdir))
        except OSError:
            pass
|
||||
|
||||
def _readConfig(self):
|
||||
self.syslog = False
|
||||
|
||||
# Read configuration file
|
||||
kernelCfg = os.path.join(self.root, "etc", "kernel.cfg")
|
||||
if self.readConfig(kernelCfg) == False:
|
||||
return
|
||||
|
||||
try:
|
||||
self.syslog = self.getBool("syslog")
|
||||
except:
|
||||
pass
|
||||
|
||||
# Changes into the SEISCOMP_ROOT directory
|
||||
def chroot(self):
    """Change into SEISCOMP_ROOT, remembering the previous directory.

    chback() restores the remembered directory. No-op when root is empty.
    """
    if not self.root:
        return
    self.cwd = os.getcwd()
    os.chdir(self.SEISCOMP_ROOT)
    self.root = ""
|
||||
|
||||
# Changes back to the current workdir
|
||||
def chback(self):
    """Restore the working directory remembered by chroot()."""
    if not self.cwd:
        return
    os.chdir(self.cwd)
    self.cwd = None
    self.root = self.SEISCOMP_ROOT
|
||||
|
||||
def resolvePath(self, path):
    """Expand @TOKEN@ placeholders in *path* to configured directories."""
    substitutions = (
        ("@LOGDIR@", self.log_dir),
        ("@CONFIGDIR@", self.local_config_dir),
        ("@DEFAULTCONFIGDIR@", self.etc_defaults_dir),
        ("@SYSTEMCONFIGDIR@", self.etc_dir),
        ("@ROOTDIR@", self.root),
        ("@DATADIR@", self.data_dir),
        ("@KEYDIR@", self.key_dir),
        ("@HOMEDIR@", self.home_dir),
    )
    for token, value in substitutions:
        path = path.replace(token, value)
    return path
|
||||
|
||||
def setCSVOutput(self, csv):
    # Toggle CSV formatting for logStatus() output.
    self._csv = csv
|
||||
|
||||
def enableModule(self, name):
    """Enable module *name* by creating its etc/init/<name>.auto marker.

    Returns 0 in all cases (also on error, for backward compatibility);
    errors are reported on stderr.
    """
    runFile = os.path.join(self.root, "etc", "init", name + ".auto")
    if os.path.exists(runFile):
        print("%s is already enabled" % name)
        return 0
    try:
        # Create an empty marker file; 'with' guarantees the handle is
        # closed even if the write fails (original leaked the handle on
        # close errors).
        with open(runFile, 'w'):
            pass
        print("enabled %s" % name)
        return 0
    except Exception as exc:
        sys.stderr.write(str(exc) + "\n")
        sys.stderr.flush()
        return 0
|
||||
|
||||
def disableModule(self, name):
    # Disable module *name* by removing its etc/init/<name>.auto marker.
    # Always returns 0; removal errors are reported on stderr.
    runFile = os.path.join(self.root, "etc", "init", name + ".auto")
    if not os.path.exists(runFile):
        print("%s is not enabled" % name)
        return 0
    try:
        os.remove(runFile)
        print("disabled %s" % name)
    except Exception as exc:
        sys.stderr.write(str(exc) + "\n")
        sys.stderr.flush()
    return 0
|
||||
|
||||
def isModuleEnabled(self, name):
    """Return True if the module's etc/init/<name>.auto marker exists."""
    runFile = os.path.join(self.root, "etc", "init", name + ".auto")
    # os.path.exists already returns a bool; the original '== True' was
    # redundant.
    return os.path.exists(runFile)
|
||||
|
||||
# Return the module name from a path
|
||||
def moduleName(self, path):
    """Derive the module name from a file path (basename, no extension)."""
    base = os.path.basename(path)
    name, _ = os.path.splitext(base)
    return name
|
||||
|
||||
# Returns a module's lockfile
|
||||
def lockFile(self, module):
    # PID/lock file used to serialize start/stop of a module.
    return os.path.join(self.root, "var", "run", module + ".pid")
|
||||
|
||||
# Returns a module's runfile
|
||||
def runFile(self, module):
    # Marker file indicating that a module should be running.
    return os.path.join(self.root, "var", "run", module + ".run")
|
||||
|
||||
# Returns a module's logfile
|
||||
def logFile(self, module):
    # File receiving the module's stdout/stderr (see start()).
    return os.path.join(self.root, "var", "log", module + ".log")
|
||||
|
||||
# Returns the binary file path of a given module name
|
||||
def binaryFile(self, module):
    # Returns the binary file path of a given module name. Currently just
    # the module name itself: the binary is resolved via PATH (see the
    # PATH setup in __init__).
    # return os.path.join(self.root, "bin/" + module)
    return module
|
||||
|
||||
def start(self, module, binary, params, nohup=False):
    # Launch *binary* with *params*, redirecting stdout/stderr to the
    # module's log file. Returns the os.system() exit status.
    # NOTE(review): the command line is passed through the shell; *binary*
    # and *params* must be trusted/escaped by the caller.
    cmd = binary + " " + params + " >" + self.logFile(module) + " 2>&1"
    if nohup:
        cmd = "nohup " + cmd + " &"
    return os.system(cmd)
|
||||
|
||||
def stop(self, module, timeout):
    # Stop a module by terminating the process recorded in its lock file;
    # escalates to SIGKILL after *timeout* seconds (see killWait()).
    return self.killWait(module, timeout)
|
||||
|
||||
def tryLock(self, module, timeout=None):
    """Try to acquire the module's lock file via external helpers.

    Without *timeout* the 'trylock' helper is used (non-blocking);
    otherwise 'waitlock' waits up to *timeout* seconds. Returns True if
    the lock was acquired.

    Raises ValueError/TypeError when *timeout* is not convertible to int
    (original re-raised from a bare except; narrowed here).
    """
    if timeout is None:
        return subprocess.call("trylock " + self.lockFile(module), shell=True) == 0

    try:
        timeoutSeconds = int(timeout)
    except (TypeError, ValueError):
        print("Invalid timeout parameter, expected positive integer")
        raise
    return subprocess.call("waitlock %d \"%s\"" % (timeoutSeconds, self.lockFile(module)), shell=True) == 0
|
||||
|
||||
def killWait(self, module, timeout):
    # Terminate the module's process: send SIGTERM to the PID stored in
    # the lock file, wait up to *timeout* seconds for the lock to be
    # released, then SIGKILL as a last resort. Returns True.
    lockfile = self.lockFile(module)

    # Open pid file
    # NOTE(review): the handle is only closed on the error path below; it
    # leaks on the success path.
    f = open(lockfile, "r")

    # Try to read the pid
    try:
        pid = int(f.readline())
    except:
        f.close()
        raise

    # Kill process with pid
    subprocess.call("kill %d" % pid, shell=True)
    if subprocess.call("waitlock %d \"%s\"" % (timeout, lockfile), shell=True) != 0:
        print("timeout exceeded")
        subprocess.call("kill -9 %d" % pid, shell=True)

    # Remove pid file (best effort)
    try:
        os.remove(lockfile)
    except:
        pass

    return True
|
||||
|
||||
def processTemplate(self, templateFile, paths, params, printError=False):
    # Find *templateFile* in the first matching directory of *paths* and
    # substitute $-placeholders from *params*. Undefined placeholders are
    # replaced by "" with a warning. Returns "" when no template is found
    # or it is not readable. The resolved file name is remembered in
    # self.last_template_file.
    self.last_template_file = None

    # for/else: the 'else' branch runs only when no path matched (no
    # break was taken).
    for tp in paths:
        if os.path.exists(os.path.join(tp, templateFile)):
            break

    else:
        if printError:
            print("Error: template %s not found" % templateFile)
        return ""

    filename = os.path.join(tp, templateFile)
    self.last_template_file = filename

    try:
        t = Template(open(filename).read())
    except:
        if printError:
            print("Error: template %s not readable" % filename)
        return ""

    # Built-in placeholders available to every template.
    params['date'] = time.ctime()
    params['template'] = filename

    # Substitute repeatedly, defining each missing key as "" until the
    # substitution succeeds.
    while True:
        try:
            return t.substitute(params)

        except KeyError as e:
            print("warning: $%s is not defined in %s" % (e.args[0], filename))
            params[e.args[0]] = ""

        except ValueError as e:
            raise ValueError("%s: %s" % (filename, str(e)))
|
||||
|
||||
def logStatus(self, name, isRunning, shouldRun, isEnabled):
    # Print one module status line: human readable by default, or
    # "name;isRunning;shouldRun;isEnabled" when CSV output was enabled
    # via setCSVOutput().
    if self._csv == False:
        sys.stdout.write("%-20s is " % name)
        if not isRunning:
            sys.stdout.write("not ")
        sys.stdout.write("running")
        if not isRunning and shouldRun:
            sys.stdout.write(" [WARNING]")
        sys.stdout.write("\n")
    else:
        sys.stdout.write("%s;%d;%d;%d\n" % (
            name, int(isRunning), int(shouldRun), int(isEnabled)))
    sys.stdout.flush()
|
||||
|
||||
def log(self, line):
    # Write one line to stdout and flush immediately.
    sys.stdout.write(line + "\n")
    sys.stdout.flush()
|
||||
|
||||
|
||||
# The module interface which implementes the basic default operations.
|
||||
# Each script can define its own handlers to customize the behaviour.
|
||||
# Available handlers:
|
||||
# start()
|
||||
# stop()
|
||||
# check()
|
||||
# status(shouldRun)
|
||||
# setup(params = dict{name, values as []})
|
||||
# updateConfig()
|
||||
class Module:
    """Interface of an installed module with default start/stop behaviour.

    Scripts may subclass and override the handlers start(), stop(),
    check(), status(shouldRun), setup(params) and updateConfig() to
    customize the behaviour.
    """

    def __init__(self, env, name):
        self.env = env
        self.name = name
        # The start order
        self.order = 100
        # Defines if this is a kernel module or not.
        # Kernel modules are always started
        self.isKernelModule = False
        # Defines if this is a config only module
        self.isConfigModule = False
        # Default timeout (seconds) when stopping a module before killing it
        self.killTimeout = 10
        # Default timeout (seconds) when reloading a module
        self.reloadTimeout = 10

    def _get_start_params(self):
        # Run as daemon; append the syslog flag when configured and pass
        # the module's lock file.
        params = "-D"
        if self.env.syslog:
            params = params + "s"
        params = params + " -l " + self.env.lockFile(self.name)
        return params

    def _run(self):
        # Launch the module's binary with the default daemon parameters.
        return self.env.start(self.name, self.env.binaryFile(self.name),
                              self._get_start_params())

    def isRunning(self):
        # The module is running exactly when its lock cannot be acquired.
        return not self.env.tryLock(self.name)

    def start(self):
        """Start the module unless it is already running. Returns 0/1."""
        if self.isRunning():
            self.env.log("%s is already running" % self.name)
            return 1

        self.env.log("starting %s" % self.name)
        return self._run()

    def stop(self):
        """Stop the module unless it is not running. Returns 0/1."""
        if not self.isRunning():
            self.env.log("%s is not running" % self.name)
            return 1

        self.env.log("shutting down %s" % self.name)
        # Waits up to killTimeout seconds before killing the process
        return self.env.stop(self.name, self.killTimeout)

    def reload(self):
        # Reload is not supported by the default implementation.
        self.env.log("reload not supported by %s" % self.name)
        return 1

    def check(self):
        # Check is the same as start. If a module should be checked is
        # decided by the control script which checks the existence of a
        # corresponding run file.
        return self.start()

    def status(self, shouldRun):
        """Print the module's status line via the environment logger."""
        # Core modules count as enabled regardless of the .auto marker.
        enabled = self.env.isModuleEnabled(self.name) or isinstance(self, CoreModule)
        self.env.logStatus(self.name, self.isRunning(), shouldRun, enabled)

    def requiresKernelModules(self):
        # The default handler triggers a start of kernel modules before
        # updating its configuration.
        return True

    def updateConfigProxy(self):
        # Must return either the module name of the proxy module that
        # should be configured as well, or None.
        return None

    def updateConfig(self):
        # Must return a number indicating the error code where 0 means no
        # error. The default handler doesn't do anything.
        return 0

    def printCrontab(self):
        # The default handler doesn't do anything.
        return 0

    def supportsAliases(self):
        # The default handler does not support aliases.
        return False
|
||||
|
||||
|
||||
# Define a kernel core module which is started always
|
||||
class CoreModule(Module):
    # Kernel core modules are always started and count as enabled
    # regardless of the .auto marker (see Module.status()).
    def __init__(self, env, name):
        Module.__init__(self, env, name)
|
0
lib/python/seiscomp/legacy/__init__.py
Normal file
0
lib/python/seiscomp/legacy/__init__.py
Normal file
6
lib/python/seiscomp/legacy/db/__init__.py
Normal file
6
lib/python/seiscomp/legacy/db/__init__.py
Normal file
@ -0,0 +1,6 @@
|
||||
from __future__ import (absolute_import, division, print_function,
|
||||
unicode_literals)
|
||||
|
||||
class DBError(Exception):
|
||||
pass
|
||||
|
0
lib/python/seiscomp/legacy/db/seiscomp3/__init__.py
Normal file
0
lib/python/seiscomp/legacy/db/seiscomp3/__init__.py
Normal file
1149
lib/python/seiscomp/legacy/db/seiscomp3/inventory.py
Normal file
1149
lib/python/seiscomp/legacy/db/seiscomp3/inventory.py
Normal file
File diff suppressed because it is too large
Load Diff
6267
lib/python/seiscomp/legacy/db/seiscomp3/sc3wrap.py
Normal file
6267
lib/python/seiscomp/legacy/db/seiscomp3/sc3wrap.py
Normal file
File diff suppressed because it is too large
Load Diff
0
lib/python/seiscomp/legacy/db/xmlio/__init__.py
Normal file
0
lib/python/seiscomp/legacy/db/xmlio/__init__.py
Normal file
1037
lib/python/seiscomp/legacy/db/xmlio/inventory.py
Normal file
1037
lib/python/seiscomp/legacy/db/xmlio/inventory.py
Normal file
File diff suppressed because it is too large
Load Diff
4164
lib/python/seiscomp/legacy/db/xmlio/xmlwrap.py
Normal file
4164
lib/python/seiscomp/legacy/db/xmlio/xmlwrap.py
Normal file
File diff suppressed because it is too large
Load Diff
3006
lib/python/seiscomp/legacy/fseed.py
Normal file
3006
lib/python/seiscomp/legacy/fseed.py
Normal file
File diff suppressed because it is too large
Load Diff
221
lib/python/seiscomp/logging.py
Normal file
221
lib/python/seiscomp/logging.py
Normal file
@ -0,0 +1,221 @@
|
||||
# This file was automatically generated by SWIG (http://www.swig.org).
|
||||
# Version 4.0.2
|
||||
#
|
||||
# Do not make changes to this file unless you know what you are doing--modify
|
||||
# the SWIG interface file instead.
|
||||
|
||||
from sys import version_info as _swig_python_version_info
|
||||
if _swig_python_version_info < (2, 7, 0):
|
||||
raise RuntimeError("Python 2.7 or later required")
|
||||
|
||||
# Import the low-level C/C++ module
|
||||
if __package__ or "." in __name__:
|
||||
from . import _logging
|
||||
else:
|
||||
import _logging
|
||||
|
||||
try:
|
||||
import builtins as __builtin__
|
||||
except ImportError:
|
||||
import __builtin__
|
||||
|
||||
def _swig_repr(self):
    # SWIG-generated repr helper: include the underlying C++ 'this'
    # pointer when available, empty otherwise.
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
|
||||
|
||||
|
||||
def _swig_setattr_nondynamic_instance_variable(set):
    # SWIG-generated helper wrapping an instance __setattr__: only 'this',
    # 'thisown' and existing properties may be assigned; adding new
    # instance attributes is rejected.
    def set_instance_attr(self, name, value):
        if name == "thisown":
            self.this.own(value)
        elif name == "this":
            set(self, name, value)
        elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
            set(self, name, value)
        else:
            raise AttributeError("You cannot add instance attributes to %s" % self)
    return set_instance_attr
|
||||
|
||||
|
||||
def _swig_setattr_nondynamic_class_variable(set):
    # SWIG-generated helper wrapping a class __setattr__: only existing,
    # non-property class attributes may be reassigned; adding new class
    # attributes is rejected.
    def set_class_attr(cls, name, value):
        if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
            set(cls, name, value)
        else:
            raise AttributeError("You cannot add class attributes to %s" % cls)
    return set_class_attr
|
||||
|
||||
|
||||
def _swig_add_metaclass(metaclass):
|
||||
"""Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
|
||||
def wrapper(cls):
|
||||
return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
|
||||
return wrapper
|
||||
|
||||
|
||||
class _SwigNonDynamicMeta(type):
|
||||
"""Meta class to enforce nondynamic attributes (no new attributes) for a class"""
|
||||
__setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
|
||||
|
||||
|
||||
SEISCOMP_COMPONENT = _logging.SEISCOMP_COMPONENT
|
||||
SEISCOMP_LOG_API_VERSION = _logging.SEISCOMP_LOG_API_VERSION
|
||||
LL_UNDEFINED = _logging.LL_UNDEFINED
|
||||
LL_CRITICAL = _logging.LL_CRITICAL
|
||||
LL_ERROR = _logging.LL_ERROR
|
||||
LL_WARNING = _logging.LL_WARNING
|
||||
LL_NOTICE = _logging.LL_NOTICE
|
||||
LL_INFO = _logging.LL_INFO
|
||||
LL_DEBUG = _logging.LL_DEBUG
|
||||
LL_QUANTITY = _logging.LL_QUANTITY
|
||||
SEISCOMP_LOGGING_CURRENT_FUNCTION = _logging.SEISCOMP_LOGGING_CURRENT_FUNCTION
|
||||
|
||||
def debug(*args):
|
||||
return _logging.debug(*args)
|
||||
|
||||
def info(*args):
|
||||
return _logging.info(*args)
|
||||
|
||||
def warning(*args):
|
||||
return _logging.warning(*args)
|
||||
|
||||
def error(*args):
|
||||
return _logging.error(*args)
|
||||
|
||||
def notice(*args):
|
||||
return _logging.notice(*args)
|
||||
|
||||
def log(*args):
|
||||
return _logging.log(*args)
|
||||
|
||||
def getAll():
|
||||
return _logging.getAll()
|
||||
|
||||
def getGlobalChannel(*args):
|
||||
return _logging.getGlobalChannel(*args)
|
||||
|
||||
def getComponentChannel(*args):
|
||||
return _logging.getComponentChannel(*args)
|
||||
|
||||
def getComponentAll(component):
|
||||
return _logging.getComponentAll(component)
|
||||
|
||||
def getComponentDebugs(component):
|
||||
return _logging.getComponentDebugs(component)
|
||||
|
||||
def getComponentInfos(component):
|
||||
return _logging.getComponentInfos(component)
|
||||
|
||||
def getComponentWarnings(component):
|
||||
return _logging.getComponentWarnings(component)
|
||||
|
||||
def getComponentErrors(component):
|
||||
return _logging.getComponentErrors(component)
|
||||
|
||||
def getComponentNotices(component):
|
||||
return _logging.getComponentNotices(component)
|
||||
|
||||
def consoleOutput():
|
||||
return _logging.consoleOutput()
|
||||
|
||||
def enableConsoleLogging(arg1):
|
||||
return _logging.enableConsoleLogging(arg1)
|
||||
|
||||
def disableConsoleLogging():
|
||||
return _logging.disableConsoleLogging()
|
||||
class Output(object):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
raise AttributeError("No constructor defined - class is abstract")
|
||||
__repr__ = _swig_repr
|
||||
__swig_destroy__ = _logging.delete_Output
|
||||
|
||||
def subscribe(self, channel):
|
||||
return _logging.Output_subscribe(self, channel)
|
||||
|
||||
def unsubscribe(self, channel):
|
||||
return _logging.Output_unsubscribe(self, channel)
|
||||
|
||||
def logComponent(self, e):
|
||||
return _logging.Output_logComponent(self, e)
|
||||
|
||||
def logContext(self, e):
|
||||
return _logging.Output_logContext(self, e)
|
||||
|
||||
def setUTCEnabled(self, e):
|
||||
return _logging.Output_setUTCEnabled(self, e)
|
||||
|
||||
# Register Output in _logging:
|
||||
_logging.Output_swigregister(Output)
|
||||
|
||||
class FdOutput(Output):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def __init__(self, fdOut=2):
|
||||
_logging.FdOutput_swiginit(self, _logging.new_FdOutput(fdOut))
|
||||
__swig_destroy__ = _logging.delete_FdOutput
|
||||
|
||||
# Register FdOutput in _logging:
|
||||
_logging.FdOutput_swigregister(FdOutput)
|
||||
|
||||
class FileOutput(Output):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def __init__(self, *args):
|
||||
_logging.FileOutput_swiginit(self, _logging.new_FileOutput(*args))
|
||||
__swig_destroy__ = _logging.delete_FileOutput
|
||||
|
||||
def open(self, filename):
|
||||
return _logging.FileOutput_open(self, filename)
|
||||
|
||||
def isOpen(self):
|
||||
return _logging.FileOutput_isOpen(self)
|
||||
|
||||
# Register FileOutput in _logging:
|
||||
_logging.FileOutput_swigregister(FileOutput)
|
||||
|
||||
class FileRotatorOutput(FileOutput):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def __init__(self, *args):
|
||||
_logging.FileRotatorOutput_swiginit(self, _logging.new_FileRotatorOutput(*args))
|
||||
|
||||
def open(self, filename):
|
||||
return _logging.FileRotatorOutput_open(self, filename)
|
||||
__swig_destroy__ = _logging.delete_FileRotatorOutput
|
||||
|
||||
# Register FileRotatorOutput in _logging:
|
||||
_logging.FileRotatorOutput_swigregister(FileRotatorOutput)
|
||||
|
||||
class SyslogOutput(Output):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def __init__(self, *args):
|
||||
_logging.SyslogOutput_swiginit(self, _logging.new_SyslogOutput(*args))
|
||||
__swig_destroy__ = _logging.delete_SyslogOutput
|
||||
|
||||
def facility(self):
|
||||
return _logging.SyslogOutput_facility(self)
|
||||
|
||||
def open(self, ident, facility=None):
|
||||
return _logging.SyslogOutput_open(self, ident, facility)
|
||||
|
||||
def isOpen(self):
|
||||
return _logging.SyslogOutput_isOpen(self)
|
||||
|
||||
def close(self):
|
||||
return _logging.SyslogOutput_close(self)
|
||||
|
||||
# Register SyslogOutput in _logging:
|
||||
_logging.SyslogOutput_swigregister(SyslogOutput)
|
||||
|
||||
|
||||
|
54
lib/python/seiscomp/logs.py
Normal file
54
lib/python/seiscomp/logs.py
Normal file
@ -0,0 +1,54 @@
|
||||
#*****************************************************************************
|
||||
# logs.py
|
||||
#
|
||||
# SeisComP log handlers
|
||||
#
|
||||
# (c) 2005 Andres Heinloo, GFZ Potsdam
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the
|
||||
# Free Software Foundation; either version 2, or (at your option) any later
|
||||
# version. For more information, see http://www.gnu.org/
|
||||
#*****************************************************************************
|
||||
|
||||
from __future__ import (absolute_import, division, print_function,
|
||||
unicode_literals)
|
||||
|
||||
import sys as _sys
|
||||
import traceback as _traceback
|
||||
|
||||
class _Logf(object):
    # File-like adapter routing writes to the error() handler; used as
    # the 'file' argument in print_exc() below.
    def write(self, s):
        error(s.rstrip())
|
||||
|
||||
def print_exc():
    # Write the current exception traceback through the error() handler.
    _traceback.print_exc(file=_Logf())
|
||||
|
||||
# Default handlers, to be overridden by packages, eg.:
|
||||
#
|
||||
# def log_info(s):
|
||||
# print time.ctime() + " - trigger: " + s
|
||||
# sys.stdout.flush()
|
||||
#
|
||||
# seiscomp.logs.info = log_info
|
||||
|
||||
def debug(s):
    """Default debug handler: write *s* to stdout, one message per line."""
    line = s + "\n"
    _sys.stdout.write(line)
    _sys.stdout.flush()
|
||||
|
||||
def info(s):
    """Default info handler: write *s* to stdout, one message per line."""
    line = s + "\n"
    _sys.stdout.write(line)
    _sys.stdout.flush()
|
||||
|
||||
def notice(s):
    """Default notice handler: write *s* to stdout, one message per line."""
    line = s + "\n"
    _sys.stdout.write(line)
    _sys.stdout.flush()
|
||||
|
||||
def warning(s):
    """Default warning handler: write *s* to stdout, one message per line."""
    line = s + "\n"
    _sys.stdout.write(line)
    _sys.stdout.flush()
|
||||
|
||||
def error(s):
    """Default error handler: write *s* to stdout, one message per line."""
    line = s + "\n"
    _sys.stdout.write(line)
    _sys.stdout.flush()
|
||||
|
3139
lib/python/seiscomp/math.py
Normal file
3139
lib/python/seiscomp/math.py
Normal file
File diff suppressed because it is too large
Load Diff
491
lib/python/seiscomp/mseedlite.py
Normal file
491
lib/python/seiscomp/mseedlite.py
Normal file
@ -0,0 +1,491 @@
|
||||
"""Python-only Mini-SEED module with limited functionality.
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
any later version.
|
||||
|
||||
:Copyright:
|
||||
2005 Andres Heinloo, GEOFON, GFZ Potsdam <geofon@gfz-potsdam.de>
|
||||
:License:
|
||||
GPLv3
|
||||
:Platform:
|
||||
Linux
|
||||
|
||||
.. moduleauthor:: Andres Heinloo <andres@gfz-potsdam.de>, GEOFON, GFZ Potsdam
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import datetime
|
||||
import struct
|
||||
import sys
|
||||
from io import BytesIO
|
||||
|
||||
_FIXHEAD_LEN = 48
|
||||
_BLKHEAD_LEN = 4
|
||||
_BLK1000_LEN = 4
|
||||
_BLK1001_LEN = 4
|
||||
_MAX_RECLEN = 4096
|
||||
|
||||
_doy = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365)
|
||||
|
||||
|
||||
def _is_leap(y):
|
||||
"""True if y is a leap year."""
|
||||
return (y % 400 == 0) or (y % 4 == 0 and y % 100 != 0)
|
||||
|
||||
|
||||
def _ldoy(y, m):
    """The day of the year of the first day of month m, in year y.

    Note: for January, m=1; for December, m=12.
    Examples:
        _ldoy(1900, 4) = 90
        _ldoy(1900, 1) = 0
        _ldoy(1999, 4) = 90
        _ldoy(2004, 4) = 91
        _ldoy(2000, 4) = 91
    """
    leap_adjust = 1 if (_is_leap(y) and m >= 3) else 0
    return _doy[m - 1] + leap_adjust
|
||||
|
||||
|
||||
def _dy2mdy(doy, year):
    """Convert a day-of-year to a (month, mday) tuple for *year*."""
    month = 1
    while doy > _ldoy(year, month + 1):
        month += 1
    return (month, doy - _ldoy(year, month))
|
||||
|
||||
|
||||
def _mdy2dy(month, day, year):
    """Convert (month, day) of *year* to a day-of-year."""
    return day + _ldoy(year, month)
|
||||
|
||||
|
||||
class EndOfData(Exception):
    """Raised when no more records can be read from the input."""
|
||||
|
||||
|
||||
class MSeedError(Exception):
    """Base error for malformed or unreadable Mini-SEED data."""
|
||||
|
||||
|
||||
class MSeedNoData(MSeedError):
    """Raised when a record is valid SEED but contains no data (non-data record)."""
|
||||
|
||||
|
||||
class Record(object):
    """Mini-SEED record."""

    def __init__(self, src):
        """Create a Mini-SEED record from a file handle or a bitstream.

        Parameters
        ----------
        src : bytes or file-like object
            Raw record bytes, or an object with a ``read`` method
            positioned at the start of a record.

        Raises
        ------
        TypeError
            If *src* is neither bytes nor a file-like object.
        EndOfData
            If the input is exhausted (zero bytes available).
        MSeedNoData
            If the record is not a data record (type not D/R/Q/M).
        MSeedError
            On truncated input, invalid pointers or invalid time fields.
        """
        if isinstance(src, bytes):
            fd = BytesIO(src)
        elif hasattr(src, "read"):
            fd = src
        else:
            raise TypeError("argument is neither bytes nor a file object")

        self.header = bytes()
        fixhead = fd.read(_FIXHEAD_LEN)

        if len(fixhead) == 0:
            # FIXME Check if there is no better option, but NOT StopIteration!
            raise EndOfData

        if len(fixhead) < _FIXHEAD_LEN:
            raise MSeedError("unexpected end of header")

        # Fixed 48-byte data header (big-endian, SEED layout).
        (
            recno_str,
            self.rectype,
            sta,
            loc,
            cha,
            net,
            bt_year,
            bt_doy,
            bt_hour,
            bt_minute,
            bt_second,
            bt_tms,
            self.nsamp,
            self.sr_factor,
            self.sr_mult,
            self.aflgs,
            self.cflgs,
            self.qflgs,
            self.__num_blk,
            self.time_correction,
            self.__pdata,
            self.__pblk,
        ) = struct.unpack(">6scx5s2s3s2s2H3Bx2H2h4Bl2H", fixhead)

        if sys.version_info[0] > 2:
            recno_str = recno_str.decode("utf-8")
            self.rectype = self.rectype.decode("utf-8")
            sta = sta.decode("utf-8")
            loc = loc.decode("utf-8")
            cha = cha.decode("utf-8")
            net = net.decode("utf-8")

        self.header += fixhead

        if self.rectype not in ("D", "R", "Q", "M"):
            # Skip over the rest of the (maximum-size) record.
            fd.read(_MAX_RECLEN - _FIXHEAD_LEN)
            raise MSeedNoData("non-data record")

        if self.__pdata >= _MAX_RECLEN:
            raise MSeedError(
                f"invalid pointer at {net.strip()}.{sta.strip()}.{loc.strip()}.{cha.strip()}: "
                f"record size ({self.__pdata}) >= {_MAX_RECLEN}"
            )
        if self.__pdata < _FIXHEAD_LEN or (
            self.__pblk != 0
            and ((self.__pblk < _FIXHEAD_LEN) or (self.__pblk >= self.__pdata))
        ):
            raise MSeedError(
                f"invalid pointer at {net.strip()}.{sta.strip()}.{loc.strip()}.{cha.strip()}"
            )

        if self.__pblk == 0:
            blklen = 0
        else:
            blklen = self.__pdata - self.__pblk
            gaplen = self.__pblk - _FIXHEAD_LEN
            gap = fd.read(gaplen)
            if len(gap) < gaplen:
                raise MSeedError("unexpected end of data")

            self.header += gap

        # defaults (overridden by blockettes 1000/1001 when present)
        self.encoding = 11
        self.byteorder = 1
        rec_len_exp = 12
        self.time_quality = -1
        micros = 0
        self.nframes = None
        self.__rec_len_exp_idx = None
        self.__micros_idx = None
        self.__nframes_idx = None

        pos = 0
        while pos < blklen:
            blkhead = fd.read(_BLKHEAD_LEN)
            if len(blkhead) < _BLKHEAD_LEN:
                raise MSeedError(
                    f"unexpected end of blockettes at {pos} ({len(blkhead)} bytes read)"
                )

            (blktype, nextblk) = struct.unpack(">2H", blkhead)
            self.header += blkhead
            pos += _BLKHEAD_LEN

            if blktype == 1000:
                # Blockette 1000: encoding, byte order, record length.
                blk1000 = fd.read(_BLK1000_LEN)
                if len(blk1000) < _BLK1000_LEN:
                    raise MSeedError(
                        f"unexpected end of blockettes at {pos} ({len(blk1000)} bytes read)"
                    )

                (self.encoding, self.byteorder, rec_len_exp) = struct.unpack(
                    ">3Bx", blk1000
                )

                # Remember the offset so write() can patch the value in place.
                self.__rec_len_exp_idx = self.__pblk + pos + 2
                self.header += blk1000
                pos += _BLK1000_LEN

            elif blktype == 1001:
                # Blockette 1001: timing quality, microseconds, frame count.
                blk1001 = fd.read(_BLK1001_LEN)
                if len(blk1001) < _BLK1001_LEN:
                    raise MSeedError(
                        f"unexpected end of blockettes at {pos} ({len(blk1001)} bytes read)"
                    )

                (self.time_quality, micros, self.nframes) = struct.unpack(
                    ">BbxB", blk1001
                )

                self.__micros_idx = self.__pblk + pos + 1
                self.__nframes_idx = self.__pblk + pos + 3
                self.header += blk1001
                pos += _BLK1001_LEN

            if nextblk == 0:
                break

            if nextblk < self.__pblk + pos or nextblk >= self.__pdata:
                raise MSeedError("invalid pointers")

            gaplen = nextblk - (self.__pblk + pos)
            gap = fd.read(gaplen)
            if len(gap) < gaplen:
                raise MSeedError("unexpected end of data")

            self.header += gap
            pos += gaplen

        if pos > blklen:
            raise MSeedError("corrupt record")

        # Read anything between the last blockette and the data section.
        gaplen = self.__pdata - len(self.header)
        gap = fd.read(gaplen)
        if len(gap) < gaplen:
            raise MSeedError("unexpected end of data")

        self.header += gap
        pos += gaplen

        self.recno = int(recno_str)
        self.net = net.strip()
        self.sta = sta.strip()
        self.loc = loc.strip()
        self.cha = cha.strip()

        # Derive the sample rate as a rational number from the SEED
        # factor/multiplier sign conventions.
        if (self.sr_factor > 0) and (self.sr_mult > 0):
            self.samprate_num = self.sr_factor * self.sr_mult
            self.samprate_denom = 1
        elif (self.sr_factor > 0) and (self.sr_mult < 0):
            self.samprate_num = self.sr_factor
            self.samprate_denom = -self.sr_mult
        elif (self.sr_factor < 0) and (self.sr_mult > 0):
            self.samprate_num = self.sr_mult
            self.samprate_denom = -self.sr_factor
        elif (self.sr_factor < 0) and (self.sr_mult < 0):
            self.samprate_num = 1
            self.samprate_denom = self.sr_factor * self.sr_mult
        else:
            self.samprate_num = 0
            self.samprate_denom = 1

        self.fsamp = float(self.samprate_num) / float(self.samprate_denom)

        # quick fix to avoid exception from datetime
        if bt_second > 59:
            self.leap = bt_second - 59
            bt_second = 59
        else:
            self.leap = 0

        try:
            (month, day) = _dy2mdy(bt_doy, bt_year)
            self.begin_time = datetime.datetime(
                bt_year, month, day, bt_hour, bt_minute, bt_second
            )

            # bt_tms is in units of 0.0001 s; micros adds the 1001 refinement.
            self.begin_time += datetime.timedelta(microseconds=bt_tms * 100 + micros)

            if (self.nsamp != 0) and (self.fsamp != 0):
                msAux = 1000000 * self.nsamp / self.fsamp
                self.end_time = self.begin_time + datetime.timedelta(microseconds=msAux)
            else:
                self.end_time = self.begin_time

        except ValueError as e:
            raise MSeedError(f"invalid time: {str(e)}")

        self.size = 1 << rec_len_exp
        if (self.size < len(self.header)) or (self.size > _MAX_RECLEN):
            raise MSeedError("invalid record size")

        datalen = self.size - self.__pdata
        self.data = fd.read(datalen)
        if len(self.data) < datalen:
            raise MSeedError("unexpected end of data")

        if len(self.header) + len(self.data) != self.size:
            raise MSeedError("internal error")

        # X0/Xn: forward/reverse integration constants of the first frame.
        (self.X0, self.Xn) = struct.unpack(">ll", self.data[4:12])

        (w0,) = struct.unpack(">L", self.data[:4])
        (w3,) = struct.unpack(">L", self.data[12:16])
        c3 = (w0 >> 24) & 0x3
        d0 = None

        if self.encoding == 10:
            # STEIM (1) compression: decode the first difference d0.
            if c3 == 1:
                d0 = (w3 >> 24) & 0xFF
                if d0 > 0x7F:
                    d0 -= 0x100
            elif c3 == 2:
                d0 = (w3 >> 16) & 0xFFFF
                if d0 > 0x7FFF:
                    d0 -= 0x10000
            elif c3 == 3:
                d0 = w3 & 0xFFFFFFFF
                if d0 > 0x7FFFFFFF:
                    # Two's-complement sign extension of a 32-bit value
                    # (the original did this as "-= 0xFFFFFFFF; -= 1").
                    d0 -= 0x100000000

        elif self.encoding == 11:
            # STEIM (2) compression: decode the first difference d0.
            if c3 == 1:
                d0 = (w3 >> 24) & 0xFF
                if d0 > 0x7F:
                    d0 -= 0x100
            elif c3 == 2:
                dnib = (w3 >> 30) & 0x3
                if dnib == 1:
                    d0 = w3 & 0x3FFFFFFF
                    if d0 > 0x1FFFFFFF:
                        d0 -= 0x40000000
                elif dnib == 2:
                    d0 = (w3 >> 15) & 0x7FFF
                    if d0 > 0x3FFF:
                        d0 -= 0x8000
                elif dnib == 3:
                    d0 = (w3 >> 20) & 0x3FF
                    if d0 > 0x1FF:
                        d0 -= 0x400
            elif c3 == 3:
                dnib = (w3 >> 30) & 0x3
                if dnib == 0:
                    d0 = (w3 >> 24) & 0x3F
                    if d0 > 0x1F:
                        d0 -= 0x40
                elif dnib == 1:
                    d0 = (w3 >> 25) & 0x1F
                    if d0 > 0xF:
                        d0 -= 0x20
                elif dnib == 2:
                    d0 = (w3 >> 24) & 0xF
                    if d0 > 0x7:
                        d0 -= 0x10

        if d0 is not None:
            self.X_minus1 = self.X0 - d0
        else:
            self.X_minus1 = None

        if (self.nframes is None) or (self.nframes == 0):
            # No (or zero) frame count from blockette 1001: count the
            # 64-byte frames up to the first one starting with a NUL byte.
            i = 0
            self.nframes = 0
            while i < len(self.data):
                # BUGFIX: the original compared self.data[i] (an int under
                # Python 3) to the str "\0", which is never equal, so the
                # scan always counted every frame.  Compare bytes-to-bytes,
                # which works on both Python 2 and 3.
                if self.data[i:i + 1] == b"\0":
                    break

                i += 64
                self.nframes += 1
|
||||
|
||||
def merge(self, rec):
|
||||
"""Caller is expected to check for contiguity of data.
|
||||
|
||||
Check if rec.nframes * 64 <= len(data)?
|
||||
"""
|
||||
(self.Xn,) = struct.unpack(">l", rec.data[8:12])
|
||||
self.data += rec.data[: rec.nframes * 64]
|
||||
self.nframes += rec.nframes
|
||||
self.nsamp += rec.nsamp
|
||||
self.size = len(self.header) + len(self.data)
|
||||
self.end_time = rec.end_time
|
||||
|
||||
    def write(self, fd, rec_len_exp):
        """Write the record to an already opened file.

        Parameters
        ----------
        fd : file-like object
            Destination, opened for binary writing.
        rec_len_exp : int
            Power-of-two exponent of the record size to write; the record
            is zero-padded up to 1 << rec_len_exp bytes.

        Raises
        ------
        MSeedError
            If the record does not fit into 1 << rec_len_exp bytes.
        """
        if self.size > (1 << rec_len_exp):
            raise MSeedError(
                f"record is larger than requested write size: {self.size} > {1 << rec_len_exp}"
            )

        # Re-encode the textual fields of the fixed header as fixed-width
        # byte strings.
        recno_str = bytes(("%06d" % (self.recno,)).encode("utf-8"))
        sta = bytes(("%-5.5s" % (self.sta,)).encode("utf-8"))
        loc = bytes(("%-2.2s" % (self.loc,)).encode("utf-8"))
        cha = bytes(("%-3.3s" % (self.cha,)).encode("utf-8"))
        net = bytes(("%-2.2s" % (self.net,)).encode("utf-8"))
        bt_year = self.begin_time.year
        bt_doy = _mdy2dy(
            self.begin_time.month, self.begin_time.day, self.begin_time.year
        )
        bt_hour = self.begin_time.hour
        bt_minute = self.begin_time.minute
        # Re-add the leap second that __init__ clamped away.
        bt_second = self.begin_time.second + self.leap
        # Split microseconds into 0.0001 s ticks and the sub-tick remainder
        # (the latter goes into blockette 1001 if present).
        bt_tms = self.begin_time.microsecond // 100
        micros = self.begin_time.microsecond % 100

        # This is just to make it Python 2 AND 3 compatible (str vs. bytes)
        rectype = (
            self.rectype.encode("utf-8") if sys.version_info[0] > 2 else self.rectype
        )

        # Fixed 48-byte header (same layout unpacked in __init__).
        buf = struct.pack(
            ">6s2c5s2s3s2s2H3Bx2H2h4Bl2H",
            recno_str,
            rectype,
            b" ",
            sta,
            loc,
            cha,
            net,
            bt_year,
            bt_doy,
            bt_hour,
            bt_minute,
            bt_second,
            bt_tms,
            self.nsamp,
            self.sr_factor,
            self.sr_mult,
            self.aflgs,
            self.cflgs,
            self.qflgs,
            self.__num_blk,
            self.time_correction,
            self.__pdata,
            self.__pblk,
        )
        fd.write(buf)

        # Patch the blockette area in place: the indices recorded by
        # __init__ point at the record-length, microsecond and frame-count
        # bytes inside blockettes 1000/1001.
        buf = list(self.header[_FIXHEAD_LEN:])

        if self.__rec_len_exp_idx is not None:
            buf[self.__rec_len_exp_idx - _FIXHEAD_LEN] = struct.pack(">B", rec_len_exp)

        if self.__micros_idx is not None:
            buf[self.__micros_idx - _FIXHEAD_LEN] = struct.pack(">b", micros)

        if self.__nframes_idx is not None:
            buf[self.__nframes_idx - _FIXHEAD_LEN] = struct.pack(">B", self.nframes)

        # buf now mixes ints (from iterating bytes on Python 3) with the
        # 1-byte bytes objects patched in above; normalise to a bytearray.
        ba = bytearray()
        for b in buf:
            try:
                ba.append(b)
            except Exception:
                ba.append(int.from_bytes(b, byteorder="big"))
        fd.write(ba)

        # Data section with the (possibly merged) integration constants
        # spliced back in, padded with NUL bytes to the requested size.
        buf = (
            self.data[:4]
            + struct.pack(">ll", self.X0, self.Xn)
            + self.data[12:]
            + ((1 << rec_len_exp) - self.size) * b"\0"
        )

        fd.write(buf)
|
||||
|
||||
|
||||
class Input(object):
    """Iterable yielding all Mini-SEED records read from a file object."""

    def __init__(self, fd):
        """Wrap the open file object *fd*."""
        self.__fd = fd

    def __iter__(self):
        """Yield Record objects until the input is exhausted."""
        while True:
            try:
                rec = Record(self.__fd)
            except EndOfData:
                # PEP 479 forbids letting StopIteration escape a
                # generator, so terminate with a plain return instead.
                return
            except MSeedNoData:
                # Non-data records are silently skipped.
                continue
            yield rec
|
187
lib/python/seiscomp/myconfig.py
Normal file
187
lib/python/seiscomp/myconfig.py
Normal file
@ -0,0 +1,187 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
|
||||
import xml.dom.minidom
|
||||
|
||||
if sys.version_info[0] >= 3:
|
||||
from configparser import RawConfigParser
|
||||
else:
|
||||
from ConfigParser import RawConfigParser
|
||||
|
||||
def readConfig(fileName):
    """Read an INI-style configuration file.

    Parameters
    ----------
    fileName : str
        Path of the configuration file.

    Returns
    -------
    RawConfigParser
        Parser populated with the file's sections and options.
    """
    cp = RawConfigParser()
    # Use a context manager so the file handle is always closed; the
    # original left it open (resource leak).
    with open(fileName, 'r') as fp:
        if sys.version_info < (3, 2):
            cp.readfp(fp)  # pylint: disable=W1505
        else:
            cp.read_file(fp, fileName)
    return cp
|
||||
|
||||
|
||||
def parseXMLnode(root):
    """Recursively convert the element children of *root* into tuples.

    Returns a list of (name, attributes, content) tuples, one per child
    element: content is None for empty elements, a stripped string for
    single-text-node elements, and a nested list otherwise.
    """
    result = []

    if not root.hasChildNodes():
        return result

    for child in root.childNodes:
        # Only element nodes are represented; text/comment nodes at this
        # level are ignored.
        if child.nodeType != child.ELEMENT_NODE:
            continue

        n_children = len(child.childNodes)
        if n_children == 0:
            content = None
        elif n_children == 1 and child.firstChild.nodeValue:
            content = child.firstChild.nodeValue.strip()
        else:
            content = parseXMLnode(child)

        attributes = {}
        if child.hasAttributes():
            for idx in range(child.attributes.length):
                attr = child.attributes.item(idx)
                attributes[attr.nodeName] = attr.nodeValue.strip()

        result.append((child.nodeName, attributes, content))

    return result
|
||||
|
||||
def parseXMLfile(f):
    """Parse XML file *f* and return the (name, attrs, content) tuple of
    its single root element."""
    document = xml.dom.minidom.parse(f)
    elements = parseXMLnode(document)
    if len(elements) == 1:
        return elements[0]
    # More than one root element cannot occur here: it is already caught
    # by xml.dom.minidom.parse().
|
||||
|
||||
class MyConfig(dict):
    """Dictionary of configuration sections keyed by section name.

    The file format is chosen by extension: ``.ini`` files are parsed
    with RawConfigParser, ``.xml`` files with the minimal XML reader
    (XML support is only provided for testing).
    """

    def __init__(self, filename):
        """Load *filename*, dispatching on its extension.

        Raises
        ------
        ValueError
            If the extension is neither ``.ini`` nor ``.xml``.
        """
        ext = filename[-4:].lower()
        if ext == ".ini":
            self.readINI(filename)
        elif ext == ".xml":
            self.readXML(filename)
        else:
            # The original printed a placeholder ("XXXXXXXXXXXXXXX") and
            # silently produced an empty config; an unsupported extension
            # is a caller error and must not pass unnoticed.
            raise ValueError("unsupported config file extension: %s" % filename)

    def readINI(self, filename):
        """Populate self from an INI file: self[section][option] = value."""
        config = readConfig(filename)

        for sec in config.sections():
            d = self[sec] = {}
            for opt in config.options(sec):
                d[opt] = config.get(sec, opt)

    def readXML(self, filename):
        """Populate self from an XML file."""
        # XXX XML support is only provided for testing.
        name, attrs, content = parseXMLfile(filename)
        assert name == "config"
        for name, attrs, content in content:
            assert "name" in attrs
            sec = attrs["name"]
            assert name == "section"
            d = self[sec] = {}

            # NOTE: the loop variables are deliberately rebound here; the
            # outer iterator was captured before the rebinding.
            for name, attrs, content in content:
                if isinstance(content, list):
                    raise TypeError("<%s> elements can't have children" % name)

                if name == "string":
                    tmp = str(content)
                elif name == "int":
                    tmp = int(content)
                elif name == "float":
                    tmp = float(content)
                else:
                    raise NameError("illegal tag '%s'" % name)

                if "name" not in attrs:
                    raise NameError("missing 'name' attribute in <%s>" % name)
                opt = attrs["name"]
                d[opt] = tmp
|
||||
|
||||
|
||||
class ConfigINI(dict):
    """Dictionary of INI sections with defaulting and mandatory-item checks."""

    def __init__(self, filename, mandatory=None):
        """Load *filename*.

        Parameters
        ----------
        filename : str
            Path of the INI file.
        mandatory : list or None
            Item names that every non-default section must define; any
            non-list value is treated as an empty list.
        """
        self.read(filename)
        if not isinstance(mandatory, list):
            mandatory = []
        self.mandatory = mandatory

    def read(self, filename):
        """Populate self from the INI file: self[section][option] = value."""
        config = readConfig(filename)

        for sec in config.sections():
            d = self[sec] = {}
            for opt in config.options(sec):
                d[opt] = config.get(sec, opt)

    def fillDefault(self, defaultSection="default"):
        """Copy items of *defaultSection* into every other section that
        lacks them, then report missing mandatory items on stderr."""
        default = self[defaultSection]

        for section in self:
            if section == defaultSection:
                continue

            # for missing items, use the default
            for item in default:
                if item not in self[section]:
                    self[section][item] = default[item]

            for item in self.mandatory:
                if item not in self[section]:
                    # BUGFIX: the original called msg(), which is not
                    # defined anywhere (NameError at runtime); report on
                    # stderr instead.
                    # XXX this could also be treated as a fatal error
                    print("[%s]: missing item '%s'" % (section, item),
                          file=sys.stderr)
|
||||
|
||||
|
||||
class ConfigXML(MyConfig):
    """MyConfig variant that always reads XML, regardless of extension.

    XXX XML support is only provided for testing.
    """

    def __init__(self, filename):
        """Load the XML file *filename*."""
        self.read(filename)

    def read(self, filename):
        """Populate self from the XML file *filename*.

        The original body was a verbatim copy of MyConfig.readXML;
        delegate to the inherited method instead of maintaining a
        second copy of the same parsing code.
        """
        self.readXML(filename)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Ad-hoc smoke test: read the two sample configuration files from the
    # working directory and dump their sections.
    for f in "test.ini", "test.xml":
        print("#### filename=", f)
        config = MyConfig(f)
        print(config)
        for section in config:
            print(section, config[section])
|
||||
|
1592
lib/python/seiscomp/scbulletin.py
Normal file
1592
lib/python/seiscomp/scbulletin.py
Normal file
File diff suppressed because it is too large
Load Diff
899
lib/python/seiscomp/seismology.py
Normal file
899
lib/python/seiscomp/seismology.py
Normal file
@ -0,0 +1,899 @@
|
||||
# This file was automatically generated by SWIG (http://www.swig.org).
|
||||
# Version 4.0.2
|
||||
#
|
||||
# Do not make changes to this file unless you know what you are doing--modify
|
||||
# the SWIG interface file instead.
|
||||
|
||||
"""Codes for various seismological computations"""
|
||||
|
||||
from sys import version_info as _swig_python_version_info
|
||||
if _swig_python_version_info < (2, 7, 0):
|
||||
raise RuntimeError("Python 2.7 or later required")
|
||||
|
||||
# Import the low-level C/C++ module
|
||||
if __package__ or "." in __name__:
|
||||
from . import _seismology
|
||||
else:
|
||||
import _seismology
|
||||
|
||||
try:
|
||||
import builtins as __builtin__
|
||||
except ImportError:
|
||||
import __builtin__
|
||||
|
||||
def _swig_repr(self):
    """Return a repr for a SWIG proxy, tolerating a missing C++ object."""
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        # The underlying C++ object may not exist (yet); fall back to an
        # empty description.
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
|
||||
|
||||
|
||||
def _swig_setattr_nondynamic_instance_variable(set):
    """Wrap *set* so SWIG proxy instances reject new (undeclared) attributes."""
    def set_instance_attr(self, name, value):
        # 'thisown' and 'this' are managed by the SWIG runtime.
        if name == "thisown":
            self.this.own(value)
        elif name == "this":
            set(self, name, value)
        elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
            # Properties keep working; anything else must already exist.
            set(self, name, value)
        else:
            raise AttributeError("You cannot add instance attributes to %s" % self)
    return set_instance_attr
|
||||
|
||||
|
||||
def _swig_setattr_nondynamic_class_variable(set):
    """Wrap *set* so SWIG proxy classes reject new (undeclared) class attributes."""
    def set_class_attr(cls, name, value):
        # Only existing, non-property class attributes may be rebound.
        if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
            set(cls, name, value)
        else:
            raise AttributeError("You cannot add class attributes to %s" % cls)
    return set_class_attr
|
||||
|
||||
|
||||
def _swig_add_metaclass(metaclass):
    """Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
    def wrapper(cls):
        # Re-create the class under the given metaclass, keeping its
        # name, bases and attribute dict.
        return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
    return wrapper
|
||||
|
||||
|
||||
class _SwigNonDynamicMeta(type):
    """Meta class to enforce nondynamic attributes (no new attributes) for a class"""
    # Route class-attribute assignment through the guarded setter above.
    __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
|
||||
|
||||
|
||||
import weakref
|
||||
|
||||
class SwigPyIterator(object):
    """SWIG-generated (SWIG 4.0.2) C++ iterator proxy.

    NOTE: machine-generated code -- regenerate from the SWIG interface
    file instead of hand-editing (see the header of this file).
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _seismology.delete_SwigPyIterator

    def value(self):
        return _seismology.SwigPyIterator_value(self)

    def incr(self, n=1):
        return _seismology.SwigPyIterator_incr(self, n)

    def decr(self, n=1):
        return _seismology.SwigPyIterator_decr(self, n)

    def distance(self, x):
        return _seismology.SwigPyIterator_distance(self, x)

    def equal(self, x):
        return _seismology.SwigPyIterator_equal(self, x)

    def copy(self):
        return _seismology.SwigPyIterator_copy(self)

    def next(self):
        return _seismology.SwigPyIterator_next(self)

    def __next__(self):
        return _seismology.SwigPyIterator___next__(self)

    def previous(self):
        return _seismology.SwigPyIterator_previous(self)

    def advance(self, n):
        return _seismology.SwigPyIterator_advance(self, n)

    def __eq__(self, x):
        return _seismology.SwigPyIterator___eq__(self, x)

    def __ne__(self, x):
        return _seismology.SwigPyIterator___ne__(self, x)

    def __iadd__(self, n):
        return _seismology.SwigPyIterator___iadd__(self, n)

    def __isub__(self, n):
        return _seismology.SwigPyIterator___isub__(self, n)

    def __add__(self, n):
        return _seismology.SwigPyIterator___add__(self, n)

    def __sub__(self, *args):
        return _seismology.SwigPyIterator___sub__(self, *args)
    def __iter__(self):
        return self

# Register SwigPyIterator in _seismology:
_seismology.SwigPyIterator_swigregister(SwigPyIterator)
|
||||
|
||||
import seiscomp.io
|
||||
import seiscomp.math
|
||||
import seiscomp.core
|
||||
import seiscomp.datamodel
|
||||
import seiscomp.geo
|
||||
import seiscomp.config
|
||||
class TravelTimeList_internal(object):
    r"""Proxy of C++ std::list< Seiscomp::TravelTime > class."""
    # NOTE: SWIG-generated (SWIG 4.0.2) container proxy -- regenerate from
    # the SWIG interface file instead of hand-editing.

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def iterator(self):
        r"""iterator(TravelTimeList_internal self) -> SwigPyIterator"""
        return _seismology.TravelTimeList_internal_iterator(self)
    def __iter__(self):
        return self.iterator()

    def __nonzero__(self):
        r"""__nonzero__(TravelTimeList_internal self) -> bool"""
        return _seismology.TravelTimeList_internal___nonzero__(self)

    def __bool__(self):
        r"""__bool__(TravelTimeList_internal self) -> bool"""
        return _seismology.TravelTimeList_internal___bool__(self)

    def __len__(self):
        r"""__len__(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::size_type"""
        return _seismology.TravelTimeList_internal___len__(self)

    def __getslice__(self, i, j):
        r"""__getslice__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i, std::list< Seiscomp::TravelTime >::difference_type j) -> TravelTimeList_internal"""
        return _seismology.TravelTimeList_internal___getslice__(self, i, j)

    def __setslice__(self, *args):
        r"""
        __setslice__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i, std::list< Seiscomp::TravelTime >::difference_type j)
        __setslice__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i, std::list< Seiscomp::TravelTime >::difference_type j, TravelTimeList_internal v)
        """
        return _seismology.TravelTimeList_internal___setslice__(self, *args)

    def __delslice__(self, i, j):
        r"""__delslice__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i, std::list< Seiscomp::TravelTime >::difference_type j)"""
        return _seismology.TravelTimeList_internal___delslice__(self, i, j)

    def __delitem__(self, *args):
        r"""
        __delitem__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i)
        __delitem__(TravelTimeList_internal self, PySliceObject * slice)
        """
        return _seismology.TravelTimeList_internal___delitem__(self, *args)

    def __getitem__(self, *args):
        r"""
        __getitem__(TravelTimeList_internal self, PySliceObject * slice) -> TravelTimeList_internal
        __getitem__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i) -> TravelTime
        """
        return _seismology.TravelTimeList_internal___getitem__(self, *args)

    def __setitem__(self, *args):
        r"""
        __setitem__(TravelTimeList_internal self, PySliceObject * slice, TravelTimeList_internal v)
        __setitem__(TravelTimeList_internal self, PySliceObject * slice)
        __setitem__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i, TravelTime x)
        """
        return _seismology.TravelTimeList_internal___setitem__(self, *args)

    def pop(self):
        r"""pop(TravelTimeList_internal self) -> TravelTime"""
        return _seismology.TravelTimeList_internal_pop(self)

    def append(self, x):
        r"""append(TravelTimeList_internal self, TravelTime x)"""
        return _seismology.TravelTimeList_internal_append(self, x)

    def empty(self):
        r"""empty(TravelTimeList_internal self) -> bool"""
        return _seismology.TravelTimeList_internal_empty(self)

    def size(self):
        r"""size(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::size_type"""
        return _seismology.TravelTimeList_internal_size(self)

    def swap(self, v):
        r"""swap(TravelTimeList_internal self, TravelTimeList_internal v)"""
        return _seismology.TravelTimeList_internal_swap(self, v)

    def begin(self):
        r"""begin(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::iterator"""
        return _seismology.TravelTimeList_internal_begin(self)

    def end(self):
        r"""end(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::iterator"""
        return _seismology.TravelTimeList_internal_end(self)

    def rbegin(self):
        r"""rbegin(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::reverse_iterator"""
        return _seismology.TravelTimeList_internal_rbegin(self)

    def rend(self):
        r"""rend(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::reverse_iterator"""
        return _seismology.TravelTimeList_internal_rend(self)

    def clear(self):
        r"""clear(TravelTimeList_internal self)"""
        return _seismology.TravelTimeList_internal_clear(self)

    def get_allocator(self):
        r"""get_allocator(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::allocator_type"""
        return _seismology.TravelTimeList_internal_get_allocator(self)

    def pop_back(self):
        r"""pop_back(TravelTimeList_internal self)"""
        return _seismology.TravelTimeList_internal_pop_back(self)

    def erase(self, *args):
        r"""
        erase(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::iterator pos) -> std::list< Seiscomp::TravelTime >::iterator
        erase(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::iterator first, std::list< Seiscomp::TravelTime >::iterator last) -> std::list< Seiscomp::TravelTime >::iterator
        """
        return _seismology.TravelTimeList_internal_erase(self, *args)

    def __init__(self, *args):
        r"""
        __init__(TravelTimeList_internal self) -> TravelTimeList_internal
        __init__(TravelTimeList_internal self, TravelTimeList_internal other) -> TravelTimeList_internal
        __init__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::size_type size) -> TravelTimeList_internal
        __init__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::size_type size, TravelTime value) -> TravelTimeList_internal
        """
        _seismology.TravelTimeList_internal_swiginit(self, _seismology.new_TravelTimeList_internal(*args))

    def push_back(self, x):
        r"""push_back(TravelTimeList_internal self, TravelTime x)"""
        return _seismology.TravelTimeList_internal_push_back(self, x)

    def front(self):
        r"""front(TravelTimeList_internal self) -> TravelTime"""
        return _seismology.TravelTimeList_internal_front(self)

    def back(self):
        r"""back(TravelTimeList_internal self) -> TravelTime"""
        return _seismology.TravelTimeList_internal_back(self)

    def assign(self, n, x):
        r"""assign(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::size_type n, TravelTime x)"""
        return _seismology.TravelTimeList_internal_assign(self, n, x)

    def resize(self, *args):
        r"""
        resize(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::size_type new_size)
        resize(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::size_type new_size, TravelTime x)
        """
        return _seismology.TravelTimeList_internal_resize(self, *args)

    def insert(self, *args):
        r"""
        insert(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::iterator pos, TravelTime x) -> std::list< Seiscomp::TravelTime >::iterator
        insert(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::iterator pos, std::list< Seiscomp::TravelTime >::size_type n, TravelTime x)
        """
        return _seismology.TravelTimeList_internal_insert(self, *args)

    def pop_front(self):
        r"""pop_front(TravelTimeList_internal self)"""
        return _seismology.TravelTimeList_internal_pop_front(self)

    def push_front(self, x):
        r"""push_front(TravelTimeList_internal self, TravelTime x)"""
        return _seismology.TravelTimeList_internal_push_front(self, x)

    def reverse(self):
        r"""reverse(TravelTimeList_internal self)"""
        return _seismology.TravelTimeList_internal_reverse(self)
    __swig_destroy__ = _seismology.delete_TravelTimeList_internal

# Register TravelTimeList_internal in _seismology:
_seismology.TravelTimeList_internal_swigregister(TravelTimeList_internal)
|
||||
|
||||
class Regions(object):
    r"""Proxy of C++ Seiscomp::Regions class."""
    # NOTE: SWIG-generated (SWIG 4.0.2) static-only proxy -- regenerate
    # from the SWIG interface file instead of hand-editing.

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    @staticmethod
    def getFlinnEngdahlRegion(lat, lon):
        r"""getFlinnEngdahlRegion(double lat, double lon) -> std::string"""
        return _seismology.Regions_getFlinnEngdahlRegion(lat, lon)

    @staticmethod
    def getFlinnEngdahlRegionsCount():
        r"""getFlinnEngdahlRegionsCount() -> int"""
        return _seismology.Regions_getFlinnEngdahlRegionsCount()

    @staticmethod
    def getFlinnEngdahlRegionById(id):
        r"""getFlinnEngdahlRegionById(int id) -> std::string"""
        return _seismology.Regions_getFlinnEngdahlRegionById(id)

    @staticmethod
    def load():
        r"""load()"""
        return _seismology.Regions_load()

    @staticmethod
    def getRegionName(lat, lon):
        r"""getRegionName(double lat, double lon) -> std::string"""
        return _seismology.Regions_getRegionName(lat, lon)

    @staticmethod
    def polyRegions():
        r"""polyRegions() -> Seiscomp::Geo::PolyRegions &"""
        return _seismology.Regions_polyRegions()
    __swig_destroy__ = _seismology.delete_Regions

# Register Regions in _seismology:
_seismology.Regions_swigregister(Regions)
|
||||
|
||||
# Module-level aliases generated by SWIG for the static methods of Regions;
# they forward directly to the compiled _seismology extension.

def Regions_getFlinnEngdahlRegion(lat, lon):
    r"""Regions_getFlinnEngdahlRegion(double lat, double lon) -> std::string"""
    return _seismology.Regions_getFlinnEngdahlRegion(lat, lon)

def Regions_getFlinnEngdahlRegionsCount():
    r"""Regions_getFlinnEngdahlRegionsCount() -> int"""
    return _seismology.Regions_getFlinnEngdahlRegionsCount()

def Regions_getFlinnEngdahlRegionById(id):
    r"""Regions_getFlinnEngdahlRegionById(int id) -> std::string"""
    return _seismology.Regions_getFlinnEngdahlRegionById(id)

def Regions_load():
    r"""Regions_load()"""
    return _seismology.Regions_load()

def Regions_getRegionName(lat, lon):
    r"""Regions_getRegionName(double lat, double lon) -> std::string"""
    return _seismology.Regions_getRegionName(lat, lon)

def Regions_polyRegions():
    r"""Regions_polyRegions() -> Seiscomp::Geo::PolyRegions &"""
    return _seismology.Regions_polyRegions()
|
||||
|
||||
# Locator-interface version constant re-exported from the C++ layer.
SC3_LOCATOR_INTERFACE_VERSION = _seismology.SC3_LOCATOR_INTERFACE_VERSION
|
||||
|
||||
# SWIG-generated proxy for the abstract C++ delegate that resolves the sensor
# location belonging to a pick. Subclass on the C++ side; not constructible here.
class SensorLocationDelegate(seiscomp.core.BaseObject):
    r"""Proxy of C++ Seiscomp::Seismology::SensorLocationDelegate class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr

    def getSensorLocation(self, pick):
        r"""getSensorLocation(SensorLocationDelegate self, Pick pick) -> SensorLocation"""
        return _seismology.SensorLocationDelegate_getSensorLocation(self, pick)
    __swig_destroy__ = _seismology.delete_SensorLocationDelegate

# Register SensorLocationDelegate in _seismology:
_seismology.SensorLocationDelegate_swigregister(SensorLocationDelegate)
|
||||
|
||||
# SWIG-generated proxy for the abstract C++ locator plugin interface.
# Concrete locators (e.g. LocSAT below) derive from this class. The F_*
# constants are arrival-usage flags and the *Capability constants describe
# optional features a locator may support (queried via supports()).
class LocatorInterface(seiscomp.core.BaseObject):
    r"""Proxy of C++ Seiscomp::Seismology::LocatorInterface class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    # Arrival usage flags (bitmask values mirrored from the C++ enum EFlags).
    F_NONE = _seismology.LocatorInterface_F_NONE
    F_BACKAZIMUTH = _seismology.LocatorInterface_F_BACKAZIMUTH
    F_SLOWNESS = _seismology.LocatorInterface_F_SLOWNESS
    F_TIME = _seismology.LocatorInterface_F_TIME
    F_ALL = _seismology.LocatorInterface_F_ALL
    EFlagsQuantity = _seismology.LocatorInterface_EFlagsQuantity
    # Capability flags (see capabilities()/supports()).
    NoCapability = _seismology.LocatorInterface_NoCapability
    InitialLocation = _seismology.LocatorInterface_InitialLocation
    FixedDepth = _seismology.LocatorInterface_FixedDepth
    DistanceCutOff = _seismology.LocatorInterface_DistanceCutOff
    IgnoreInitialLocation = _seismology.LocatorInterface_IgnoreInitialLocation
    CapQuantity = _seismology.LocatorInterface_CapQuantity
    # Message types for lastMessage().
    Log = _seismology.LocatorInterface_Log
    Warning = _seismology.LocatorInterface_Warning

    __swig_destroy__ = _seismology.delete_LocatorInterface

    @staticmethod
    def Create(algo):
        # Factory: instantiate a registered locator implementation by name.
        r"""Create(char const * algo) -> LocatorInterface"""
        return _seismology.LocatorInterface_Create(algo)

    def name(self):
        r"""name(LocatorInterface self) -> std::string const &"""
        return _seismology.LocatorInterface_name(self)

    def setSensorLocationDelegate(self, delegate):
        r"""setSensorLocationDelegate(LocatorInterface self, SensorLocationDelegate delegate)"""
        return _seismology.LocatorInterface_setSensorLocationDelegate(self, delegate)

    def init(self, config):
        r"""init(LocatorInterface self, Config config) -> bool"""
        return _seismology.LocatorInterface_init(self, config)

    def parameters(self):
        r"""parameters(LocatorInterface self) -> VectorStr"""
        return _seismology.LocatorInterface_parameters(self)

    def parameter(self, name):
        r"""parameter(LocatorInterface self, std::string const & name) -> std::string"""
        return _seismology.LocatorInterface_parameter(self, name)

    def setParameter(self, name, value):
        r"""setParameter(LocatorInterface self, std::string const & name, std::string const & value) -> bool"""
        return _seismology.LocatorInterface_setParameter(self, name, value)

    def profiles(self):
        r"""profiles(LocatorInterface self) -> VectorStr"""
        return _seismology.LocatorInterface_profiles(self)

    def setProfile(self, name):
        r"""setProfile(LocatorInterface self, std::string const & name)"""
        return _seismology.LocatorInterface_setProfile(self, name)

    def capabilities(self):
        r"""capabilities(LocatorInterface self) -> int"""
        return _seismology.LocatorInterface_capabilities(self)

    def locate(self, *args):
        # Overloaded: with or without an explicit initial location/time.
        r"""
        locate(LocatorInterface self, Seiscomp::Seismology::LocatorInterface::PickList & pickList) -> Origin
        locate(LocatorInterface self, Seiscomp::Seismology::LocatorInterface::PickList & pickList, double initLat, double initLon, double initDepth, Time initTime) -> Origin
        """
        return _seismology.LocatorInterface_locate(self, *args)

    def relocate(self, origin):
        r"""relocate(LocatorInterface self, Origin origin) -> Origin"""
        return _seismology.LocatorInterface_relocate(self, origin)

    def lastMessage(self, arg2):
        r"""lastMessage(LocatorInterface self, Seiscomp::Seismology::LocatorInterface::MessageType arg2) -> std::string"""
        return _seismology.LocatorInterface_lastMessage(self, arg2)

    def supports(self, arg2):
        r"""supports(LocatorInterface self, Seiscomp::Seismology::LocatorInterface::Capability arg2) -> bool"""
        return _seismology.LocatorInterface_supports(self, arg2)

    def setFixedDepth(self, depth, use=True):
        r"""setFixedDepth(LocatorInterface self, double depth, bool use=True)"""
        return _seismology.LocatorInterface_setFixedDepth(self, depth, use)

    def useFixedDepth(self, use=True):
        r"""useFixedDepth(LocatorInterface self, bool use=True)"""
        return _seismology.LocatorInterface_useFixedDepth(self, use)

    def fixedDepth(self):
        r"""fixedDepth(LocatorInterface self) -> double"""
        return _seismology.LocatorInterface_fixedDepth(self)

    def usingFixedDepth(self):
        r"""usingFixedDepth(LocatorInterface self) -> bool"""
        return _seismology.LocatorInterface_usingFixedDepth(self)

    def releaseDepth(self):
        r"""releaseDepth(LocatorInterface self)"""
        return _seismology.LocatorInterface_releaseDepth(self)

    def setDistanceCutOff(self, distance):
        r"""setDistanceCutOff(LocatorInterface self, double distance)"""
        return _seismology.LocatorInterface_setDistanceCutOff(self, distance)

    def releaseDistanceCutOff(self):
        r"""releaseDistanceCutOff(LocatorInterface self)"""
        return _seismology.LocatorInterface_releaseDistanceCutOff(self)

    def isInitialLocationIgnored(self):
        r"""isInitialLocationIgnored(LocatorInterface self) -> bool"""
        return _seismology.LocatorInterface_isInitialLocationIgnored(self)

    def setIgnoreInitialLocation(self, f):
        r"""setIgnoreInitialLocation(LocatorInterface self, bool f)"""
        return _seismology.LocatorInterface_setIgnoreInitialLocation(self, f)

    def getPick(self, arrival):
        r"""getPick(LocatorInterface self, Arrival arrival) -> Pick"""
        return _seismology.LocatorInterface_getPick(self, arrival)

    def getSensorLocation(self, pick):
        r"""getSensorLocation(LocatorInterface self, Pick pick) -> SensorLocation"""
        return _seismology.LocatorInterface_getSensorLocation(self, pick)

# Register LocatorInterface in _seismology:
_seismology.LocatorInterface_swigregister(LocatorInterface)
|
||||
|
||||
# Module-level alias for the static factory LocatorInterface.Create.
def LocatorInterface_Create(algo):
    r"""LocatorInterface_Create(char const * algo) -> LocatorInterface"""
    return _seismology.LocatorInterface_Create(algo)
|
||||
|
||||
# SWIG-generated proxy exception; constructible with no argument or a message.
class PickNotFoundException(seiscomp.core.GeneralException):
    r"""Proxy of C++ Seiscomp::Seismology::PickNotFoundException class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(PickNotFoundException self) -> PickNotFoundException
        __init__(PickNotFoundException self, std::string const & str) -> PickNotFoundException
        """
        _seismology.PickNotFoundException_swiginit(self, _seismology.new_PickNotFoundException(*args))
    __swig_destroy__ = _seismology.delete_PickNotFoundException

# Register PickNotFoundException in _seismology:
_seismology.PickNotFoundException_swigregister(PickNotFoundException)
|
||||
|
||||
# SWIG-generated proxy exception raised by locator implementations.
class LocatorException(seiscomp.core.GeneralException):
    r"""Proxy of C++ Seiscomp::Seismology::LocatorException class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(LocatorException self) -> LocatorException
        __init__(LocatorException self, std::string const & str) -> LocatorException
        """
        _seismology.LocatorException_swiginit(self, _seismology.new_LocatorException(*args))
    __swig_destroy__ = _seismology.delete_LocatorException

# Register LocatorException in _seismology:
_seismology.LocatorException_swigregister(LocatorException)
|
||||
|
||||
# SWIG-generated proxy exception; constructible with no argument or a message.
class StationNotFoundException(seiscomp.core.GeneralException):
    r"""Proxy of C++ Seiscomp::Seismology::StationNotFoundException class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(StationNotFoundException self) -> StationNotFoundException
        __init__(StationNotFoundException self, std::string const & str) -> StationNotFoundException
        """
        _seismology.StationNotFoundException_swiginit(self, _seismology.new_StationNotFoundException(*args))
    __swig_destroy__ = _seismology.delete_StationNotFoundException

# Register StationNotFoundException in _seismology:
_seismology.StationNotFoundException_swigregister(StationNotFoundException)
|
||||
|
||||
|
||||
# Converters between an Arrival object and an int flag bitmask
# (presumably the LocatorInterface.F_* flags — confirm in the C++ sources).

def arrivalToFlags(arrival):
    r"""arrivalToFlags(Arrival arrival) -> int"""
    return _seismology.arrivalToFlags(arrival)

def flagsToArrival(arrival, flags):
    r"""flagsToArrival(Arrival arrival, int flags)"""
    return _seismology.flagsToArrival(arrival, flags)
|
||||
# SWIG-generated proxy exception.
# NOTE(review): this shadows Python 3's builtin FileNotFoundError within this
# module — code importing it via `from ... import *` will catch this type, not
# the builtin OSError subclass.
class FileNotFoundError(seiscomp.core.GeneralException):
    r"""Proxy of C++ Seiscomp::FileNotFoundError class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, filename):
        r"""__init__(FileNotFoundError self, std::string const & filename) -> FileNotFoundError"""
        _seismology.FileNotFoundError_swiginit(self, _seismology.new_FileNotFoundError(filename))
    __swig_destroy__ = _seismology.delete_FileNotFoundError

# Register FileNotFoundError in _seismology:
_seismology.FileNotFoundError_swigregister(FileNotFoundError)
|
||||
|
||||
# SWIG-generated proxy exception; constructed with the offending model name.
class MultipleModelsError(seiscomp.core.GeneralException):
    r"""Proxy of C++ Seiscomp::MultipleModelsError class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, model):
        r"""__init__(MultipleModelsError self, std::string const & model) -> MultipleModelsError"""
        _seismology.MultipleModelsError_swiginit(self, _seismology.new_MultipleModelsError(model))
    __swig_destroy__ = _seismology.delete_MultipleModelsError

# Register MultipleModelsError in _seismology:
_seismology.MultipleModelsError_swigregister(MultipleModelsError)
|
||||
|
||||
# SWIG-generated proxy exception; takes no constructor arguments.
class NoPhaseError(seiscomp.core.GeneralException):
    r"""Proxy of C++ Seiscomp::NoPhaseError class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self):
        r"""__init__(NoPhaseError self) -> NoPhaseError"""
        _seismology.NoPhaseError_swiginit(self, _seismology.new_NoPhaseError())
    __swig_destroy__ = _seismology.delete_NoPhaseError

# Register NoPhaseError in _seismology:
_seismology.NoPhaseError_swigregister(NoPhaseError)
|
||||
|
||||
# SWIG-generated proxy for one travel-time entry (phase name, travel time and
# its derivatives, take-off angle, optional azimuth).
# NOTE(review): __eq__ is defined without __hash__, so in Python 3 instances
# are unhashable (cannot be used as dict keys / set members).
class TravelTime(object):
    r"""Proxy of C++ Seiscomp::TravelTime class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(TravelTime self) -> TravelTime
        __init__(TravelTime self, std::string const & _phase, double _time, double _dtdd, double _dtdh, double _dddp, double _takeoff) -> TravelTime
        """
        _seismology.TravelTime_swiginit(self, _seismology.new_TravelTime(*args))

    def __eq__(self, other):
        r"""__eq__(TravelTime self, TravelTime other) -> bool"""
        return _seismology.TravelTime___eq__(self, other)

    def __lt__(self, other):
        r"""__lt__(TravelTime self, TravelTime other) -> bool"""
        return _seismology.TravelTime___lt__(self, other)
    phase = property(_seismology.TravelTime_phase_get, _seismology.TravelTime_phase_set, doc=r"""phase : std::string""")
    time = property(_seismology.TravelTime_time_get, _seismology.TravelTime_time_set, doc=r"""time : double""")
    dtdd = property(_seismology.TravelTime_dtdd_get, _seismology.TravelTime_dtdd_set, doc=r"""dtdd : double""")
    dtdh = property(_seismology.TravelTime_dtdh_get, _seismology.TravelTime_dtdh_set, doc=r"""dtdh : double""")
    dddp = property(_seismology.TravelTime_dddp_get, _seismology.TravelTime_dddp_set, doc=r"""dddp : double""")
    takeoff = property(_seismology.TravelTime_takeoff_get, _seismology.TravelTime_takeoff_set, doc=r"""takeoff : double""")
    azi = property(_seismology.TravelTime_azi_get, _seismology.TravelTime_azi_set, doc=r"""azi : Seiscomp::Core::Optional<(double)>::Impl""")
    __swig_destroy__ = _seismology.delete_TravelTime

# Register TravelTime in _seismology:
_seismology.TravelTime_swigregister(TravelTime)
|
||||
|
||||
# SWIG-generated proxy for a std::list of TravelTime entries, extended with
# the source depth/epicentral distance they were computed for.
class TravelTimeList(TravelTimeList_internal):
    r"""Proxy of C++ Seiscomp::TravelTimeList class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def isEmpty(self):
        r"""isEmpty(TravelTimeList self) -> bool"""
        return _seismology.TravelTimeList_isEmpty(self)

    def sortByTime(self):
        r"""sortByTime(TravelTimeList self)"""
        return _seismology.TravelTimeList_sortByTime(self)
    depth = property(_seismology.TravelTimeList_depth_get, _seismology.TravelTimeList_depth_set, doc=r"""depth : double""")
    delta = property(_seismology.TravelTimeList_delta_get, _seismology.TravelTimeList_delta_set, doc=r"""delta : double""")

    def __init__(self):
        r"""__init__(TravelTimeList self) -> TravelTimeList"""
        _seismology.TravelTimeList_swiginit(self, _seismology.new_TravelTimeList())
    __swig_destroy__ = _seismology.delete_TravelTimeList

# Register TravelTimeList in _seismology:
_seismology.TravelTimeList_swigregister(TravelTimeList)
|
||||
|
||||
# SWIG-generated proxy for the abstract travel-time table interface.
# Concrete tables are obtained through the Create() factory.
class TravelTimeTableInterface(seiscomp.core.BaseObject):
    r"""Proxy of C++ Seiscomp::TravelTimeTableInterface class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _seismology.delete_TravelTimeTableInterface

    @staticmethod
    def Create(name):
        # Factory: instantiate a registered travel-time table implementation.
        r"""Create(char const * name) -> TravelTimeTableInterface"""
        return _seismology.TravelTimeTableInterface_Create(name)

    def setModel(self, model):
        r"""setModel(TravelTimeTableInterface self, std::string const & model) -> bool"""
        return _seismology.TravelTimeTableInterface_setModel(self, model)

    def model(self):
        r"""model(TravelTimeTableInterface self) -> std::string const &"""
        return _seismology.TravelTimeTableInterface_model(self)

    def compute(self, *args):
        # Overloaded: all phases (-> TravelTimeList) or a single phase (-> TravelTime).
        r"""
        compute(TravelTimeTableInterface self, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> TravelTimeList
        compute(TravelTimeTableInterface self, char const * phase, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> TravelTime
        """
        return _seismology.TravelTimeTableInterface_compute(self, *args)

    def computeFirst(self, lat1, lon1, dep1, lat2, lon2, elev2=0., ellc=1):
        r"""computeFirst(TravelTimeTableInterface self, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> TravelTime"""
        return _seismology.TravelTimeTableInterface_computeFirst(self, lat1, lon1, dep1, lat2, lon2, elev2, ellc)

    def computeTime(self, phase, lat1, lon1, dep1, lat2, lon2, elev2=0., ellc=1):
        r"""computeTime(TravelTimeTableInterface self, char const * phase, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> double"""
        return _seismology.TravelTimeTableInterface_computeTime(self, phase, lat1, lon1, dep1, lat2, lon2, elev2, ellc)

# Register TravelTimeTableInterface in _seismology:
_seismology.TravelTimeTableInterface_swigregister(TravelTimeTableInterface)
|
||||
|
||||
# Module-level alias for the static factory TravelTimeTableInterface.Create.
def TravelTimeTableInterface_Create(name):
    r"""TravelTimeTableInterface_Create(char const * name) -> TravelTimeTableInterface"""
    return _seismology.TravelTimeTableInterface_Create(name)
|
||||
|
||||
# SWIG-generated proxy for the default concrete travel-time table.
class TravelTimeTable(TravelTimeTableInterface):
    r"""Proxy of C++ Seiscomp::TravelTimeTable class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self):
        r"""__init__(TravelTimeTable self) -> TravelTimeTable"""
        _seismology.TravelTimeTable_swiginit(self, _seismology.new_TravelTimeTable())

    def setModel(self, model):
        r"""setModel(TravelTimeTable self, std::string const & model) -> bool"""
        return _seismology.TravelTimeTable_setModel(self, model)

    def model(self):
        r"""model(TravelTimeTable self) -> std::string const &"""
        return _seismology.TravelTimeTable_model(self)

    def compute(self, *args):
        # Overloaded: all phases (-> TravelTimeList) or a single phase (-> TravelTime).
        r"""
        compute(TravelTimeTable self, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> TravelTimeList
        compute(TravelTimeTable self, char const * phase, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> TravelTime
        """
        return _seismology.TravelTimeTable_compute(self, *args)

    def computeFirst(self, lat1, lon1, dep1, lat2, lon2, elev2=0., ellc=1):
        r"""computeFirst(TravelTimeTable self, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> TravelTime"""
        return _seismology.TravelTimeTable_computeFirst(self, lat1, lon1, dep1, lat2, lon2, elev2, ellc)
    __swig_destroy__ = _seismology.delete_TravelTimeTable

# Register TravelTimeTable in _seismology:
_seismology.TravelTimeTable_swigregister(TravelTimeTable)
|
||||
|
||||
|
||||
# Free travel-time helper functions forwarded from the C++ layer.

def ellipcorr(phase, lat1, lon1, lat2, lon2, depth, corr):
    r"""ellipcorr(std::string const & phase, double lat1, double lon1, double lat2, double lon2, double depth, double & corr) -> bool"""
    return _seismology.ellipcorr(phase, lat1, lon1, lat2, lon2, depth, corr)

def getPhase(arg1, phaseCode):
    # Look up a phase by code within a TravelTimeList.
    r"""getPhase(TravelTimeList arg1, std::string const & phaseCode) -> TravelTime"""
    return _seismology.getPhase(arg1, phaseCode)

def firstArrivalP(arg1):
    # First-arriving P phase of a TravelTimeList.
    r"""firstArrivalP(TravelTimeList arg1) -> TravelTime"""
    return _seismology.firstArrivalP(arg1)
|
||||
# SWIG-generated proxy holding the LocSAT location uncertainty: covariance
# terms (s??), error-ellipse axes/strike, depth/time errors and confidence.
class LocSATErrorEllipsoid(object):
    r"""Proxy of C++ Seiscomp::LocSATErrorEllipsoid class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self):
        r"""__init__(LocSATErrorEllipsoid self) -> LocSATErrorEllipsoid"""
        _seismology.LocSATErrorEllipsoid_swiginit(self, _seismology.new_LocSATErrorEllipsoid())
    sxx = property(_seismology.LocSATErrorEllipsoid_sxx_get, _seismology.LocSATErrorEllipsoid_sxx_set, doc=r"""sxx : float""")
    syy = property(_seismology.LocSATErrorEllipsoid_syy_get, _seismology.LocSATErrorEllipsoid_syy_set, doc=r"""syy : float""")
    szz = property(_seismology.LocSATErrorEllipsoid_szz_get, _seismology.LocSATErrorEllipsoid_szz_set, doc=r"""szz : float""")
    stt = property(_seismology.LocSATErrorEllipsoid_stt_get, _seismology.LocSATErrorEllipsoid_stt_set, doc=r"""stt : float""")
    sxy = property(_seismology.LocSATErrorEllipsoid_sxy_get, _seismology.LocSATErrorEllipsoid_sxy_set, doc=r"""sxy : float""")
    sxz = property(_seismology.LocSATErrorEllipsoid_sxz_get, _seismology.LocSATErrorEllipsoid_sxz_set, doc=r"""sxz : float""")
    syz = property(_seismology.LocSATErrorEllipsoid_syz_get, _seismology.LocSATErrorEllipsoid_syz_set, doc=r"""syz : float""")
    stx = property(_seismology.LocSATErrorEllipsoid_stx_get, _seismology.LocSATErrorEllipsoid_stx_set, doc=r"""stx : float""")
    sty = property(_seismology.LocSATErrorEllipsoid_sty_get, _seismology.LocSATErrorEllipsoid_sty_set, doc=r"""sty : float""")
    stz = property(_seismology.LocSATErrorEllipsoid_stz_get, _seismology.LocSATErrorEllipsoid_stz_set, doc=r"""stz : float""")
    sdobs = property(_seismology.LocSATErrorEllipsoid_sdobs_get, _seismology.LocSATErrorEllipsoid_sdobs_set, doc=r"""sdobs : float""")
    smajax = property(_seismology.LocSATErrorEllipsoid_smajax_get, _seismology.LocSATErrorEllipsoid_smajax_set, doc=r"""smajax : float""")
    sminax = property(_seismology.LocSATErrorEllipsoid_sminax_get, _seismology.LocSATErrorEllipsoid_sminax_set, doc=r"""sminax : float""")
    strike = property(_seismology.LocSATErrorEllipsoid_strike_get, _seismology.LocSATErrorEllipsoid_strike_set, doc=r"""strike : float""")
    sdepth = property(_seismology.LocSATErrorEllipsoid_sdepth_get, _seismology.LocSATErrorEllipsoid_sdepth_set, doc=r"""sdepth : float""")
    stime = property(_seismology.LocSATErrorEllipsoid_stime_get, _seismology.LocSATErrorEllipsoid_stime_set, doc=r"""stime : float""")
    conf = property(_seismology.LocSATErrorEllipsoid_conf_get, _seismology.LocSATErrorEllipsoid_conf_set, doc=r"""conf : float""")
    __swig_destroy__ = _seismology.delete_LocSATErrorEllipsoid

# Register LocSATErrorEllipsoid in _seismology:
_seismology.LocSATErrorEllipsoid_swigregister(LocSATErrorEllipsoid)
|
||||
|
||||
# SWIG-generated proxy for the LocSAT locator implementation of
# LocatorInterface; adds profile management and access to the error ellipsoid.
class LocSAT(LocatorInterface):
    r"""Proxy of C++ Seiscomp::LocSAT class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self):
        r"""__init__(LocSAT self) -> LocSAT"""
        _seismology.LocSAT_swiginit(self, _seismology.new_LocSAT())
    __swig_destroy__ = _seismology.delete_LocSAT

    def init(self, config):
        r"""init(LocSAT self, Config config) -> bool"""
        return _seismology.LocSAT_init(self, config)

    def parameters(self):
        r"""parameters(LocSAT self) -> VectorStr"""
        return _seismology.LocSAT_parameters(self)

    def parameter(self, name):
        r"""parameter(LocSAT self, std::string const & name) -> std::string"""
        return _seismology.LocSAT_parameter(self, name)

    def setParameter(self, name, value):
        r"""setParameter(LocSAT self, std::string const & name, std::string const & value) -> bool"""
        return _seismology.LocSAT_setParameter(self, name, value)

    def profiles(self):
        r"""profiles(LocSAT self) -> VectorStr"""
        return _seismology.LocSAT_profiles(self)

    def setProfile(self, name):
        r"""setProfile(LocSAT self, std::string const & name)"""
        return _seismology.LocSAT_setProfile(self, name)

    @staticmethod
    def setDefaultProfile(name):
        r"""setDefaultProfile(std::string const & name)"""
        return _seismology.LocSAT_setDefaultProfile(name)

    @staticmethod
    def currentDefaultProfile():
        r"""currentDefaultProfile() -> std::string"""
        return _seismology.LocSAT_currentDefaultProfile()

    def setNewOriginID(self, newOriginID):
        r"""setNewOriginID(LocSAT self, std::string const & newOriginID)"""
        return _seismology.LocSAT_setNewOriginID(self, newOriginID)

    def capabilities(self):
        r"""capabilities(LocSAT self) -> int"""
        return _seismology.LocSAT_capabilities(self)

    def locate(self, *args):
        # Overloaded: with or without an explicit initial location/time.
        r"""
        locate(LocSAT self, Seiscomp::Seismology::LocatorInterface::PickList & pickList) -> Origin
        locate(LocSAT self, Seiscomp::Seismology::LocatorInterface::PickList & pickList, double initLat, double initLon, double initDepth, Time initTime) -> Origin
        """
        return _seismology.LocSAT_locate(self, *args)

    def relocate(self, origin):
        r"""relocate(LocSAT self, Origin origin) -> Origin"""
        return _seismology.LocSAT_relocate(self, origin)

    def errorEllipsoid(self):
        r"""errorEllipsoid(LocSAT self) -> LocSATErrorEllipsoid"""
        return _seismology.LocSAT_errorEllipsoid(self)

# Register LocSAT in _seismology:
_seismology.LocSAT_swigregister(LocSAT)
|
||||
|
||||
# Module-level aliases for the static methods of LocSAT.

def LocSAT_setDefaultProfile(name):
    r"""LocSAT_setDefaultProfile(std::string const & name)"""
    return _seismology.LocSAT_setDefaultProfile(name)

def LocSAT_currentDefaultProfile():
    r"""LocSAT_currentDefaultProfile() -> std::string"""
    return _seismology.LocSAT_currentDefaultProfile()
|
||||
|
||||
|
||||
|
509
lib/python/seiscomp/setup.py
Normal file
509
lib/python/seiscomp/setup.py
Normal file
@ -0,0 +1,509 @@
|
||||
############################################################################
|
||||
# Copyright (C) by gempa GmbH, GFZ Potsdam #
|
||||
# #
|
||||
# You can redistribute and/or modify this program under the #
|
||||
# terms of the SeisComP Public License. #
|
||||
# #
|
||||
# This program is distributed in the hope that it will be useful, #
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
|
||||
# SeisComP Public License for more details. #
|
||||
############################################################################
|
||||
|
||||
import os
|
||||
import sys
|
||||
import glob
|
||||
import getpass
|
||||
|
||||
try:
|
||||
# Python 2.5
|
||||
from xml.etree import ElementTree
|
||||
from xml.parsers.expat import ExpatError as ParseError
|
||||
except ImportError:
|
||||
from elementtree import ElementTree
|
||||
from xml.parsers.expat import ExpatError as ParseError
|
||||
|
||||
from seiscomp import config
|
||||
|
||||
# Python-version-dependent input function: Python 2's input() evaluates the
# typed text as an expression, so raw_input is the safe equivalent there;
# Python 3's input() already returns the raw string.
if sys.version_info[0] < 3:
    py3input = raw_input #pylint: disable=E0602
else:
    py3input = input
|
||||
|
||||
|
||||
def tagname(element):
    """Return *element*'s tag name with any XML namespace prefix removed.

    ElementTree qualifies namespaced tags as "{namespace}name"; splitting on
    "}" and taking the last part yields the bare name either way.
    """
    # str.split always returns at least one element, so the previous
    # `if len(names) == 0` guard was unreachable and has been dropped.
    return element.tag.split("}")[-1]
|
||||
|
||||
|
||||
def oneliner(txt):
    """Collapse *txt* onto a single line.

    Surrounding whitespace is trimmed and every embedded newline removed.
    """
    return "".join(txt.strip().split("\n"))
|
||||
|
||||
|
||||
def block(txt, width=80):
    """Normalize *txt* and word-wrap it to at most *width* characters.

    Every line is stripped of surrounding whitespace and carriage returns
    are discarded. Existing line breaks are kept when the line fits into
    *width*; longer lines are broken at the last space inside the window,
    or at the next space for an overlong word. Returns a list of lines.
    """
    stripped = [part.strip() for part in txt.strip().replace("\r", "").split('\n')]
    text = "\n".join(stripped)

    out = []
    pos = 0
    while pos < len(text):
        cut = text.find('\n', pos)
        if cut == -1 or cut - pos > width:
            if len(text) - pos > width:
                # Prefer the last space inside the window ...
                cut = text.rfind(' ', pos, pos + width)
                if cut == -1:
                    # ... otherwise break after the overlong word ...
                    cut = text.find(' ', pos)
                    if cut == -1:
                        # ... or take everything that is left.
                        cut = len(text)
            else:
                cut = len(text)
        out.append(text[pos:cut].strip())
        pos = cut + 1

    return out
|
||||
|
||||
|
||||
class SetupNode:
    """One node of the interactive setup tree.

    Nodes form a linked structure: siblings via ``next``, subtrees via
    ``child``/``activeChild``. A node carries the setup input it was built
    from plus the answer state collected during the dialog.
    """

    def __init__(self, parent, inp, next = None):
        # Tree linkage.
        self.parent = parent
        self.next = next
        self.child = None
        self.activeChild = None

        # Payload: the originating input and the collected answer.
        self.input = inp
        self.modname = ""      # owning module name
        self.groupname = ""    # owning setup group
        self.value = ""        # user-supplied value
        self.path = ""         # config path below the module
        self.optionValue = None
        self.isOption = False  # True when this node represents an option choice
|
||||
|
||||
|
||||
class Option:
    """
    Setup input option wrapper: one selectable choice of an input,
    optionally carrying its own follow-up inputs.
    """

    def __init__(self, value):
        self.value = value   # the option's literal value
        self.desc = None     # optional human-readable description
        self.inputs = []     # follow-up Input objects shown when chosen
|
||||
|
||||
|
||||
class Input:
    """
    Setup input wrapper: one question of a module's setup dialog.
    """

    def __init__(self, name, t, default_value=None):
        # Identification.
        self.name = name
        self.type = t
        # Presentation (filled in while parsing the description XML).
        self.text = None
        self.desc = None
        self.echo = None
        # Value handling.
        self.default_value = default_value
        self.options = []    # available Option choices, if any
|
||||
|
||||
|
||||
def dumpTree(cfg, node):
    """Recursively write the answered setup tree into *cfg*.

    Every node that carries an input is stored as
    ``<modname>.<path> = <value>``; the active child subtree and all
    siblings (via ``next``) are visited in order.
    """
    if node.input:
        cfg.setString("{}.{}".format(node.modname, node.path), node.value)

    active = node.activeChild
    if active:
        # An option node nests its real subtree one level deeper.
        dumpTree(cfg, active.child if node.isOption else active)

    if node.next is not None:
        dumpTree(cfg, node.next)
|
||||
|
||||
|
||||
class Simple:
    """
    Simple console setup handler that parses all description xml files
    and extracts the setup part. It asks for all available setting line
    by line and passes the resulting configuration back which is then
    passed to all init modules that have a setup method.
    """

    def __init__(self, args = []):
        # NOTE(review): mutable default argument; harmless as long as
        # self.modules is never mutated in place.
        self.modules = args                      # restrict setup to these modules ([] = all)
        self.setupTree = SetupNode(None, None)   # root of the question tree
        self.paths = []                          # stack of answered nodes (for ".back")
        self.currentNode = None                  # node currently being asked

    def run(self, env):
        """Parse all description XMLs under etc/descriptions, build the
        setup tree, run the interactive question loop and return the
        resulting config object.

        Raises Exception on duplicate module names, malformed plugin
        descriptions, or when the user aborts the session.
        """
        desc_pattern = os.path.join(
            env.SEISCOMP_ROOT, "etc", "descriptions", "*.xml")
        xmls = glob.glob(desc_pattern)

        # modname -> list of <group> elements collected from modules and plugins
        setup_groups = {}

        for f in xmls:
            try:
                tree = ElementTree.parse(f)
            except ParseError as xxx_todo_changeme:
                # NOTE(review): leftover 2to3 conversion artifact; the
                # temporary name simply rebinds the exception to 'err'.
                (err) = xxx_todo_changeme
                sys.stderr.write("%s: parsing XML failed: %s\n" % (f, err))
                continue

            root = tree.getroot()
            if tagname(root) != "seiscomp":
                sys.stderr.write(
                    "%s: wrong root tag, expected 'seiscomp'\n" % f)
                continue

            # Read all modules
            mods = tree.findall("module")

            for mod in mods:
                modname = mod.get('name')
                if not modname:
                    sys.stderr.write("%s: skipping module without name\n" % f)
                    continue

                if modname in setup_groups:
                    raise Exception(
                        "%s: duplicate module name: %s" % (f, modname))

                # Skip modules not requested on the command line
                if self.modules and modname not in self.modules:
                    continue

                setup = mod.find("setup")
                if setup is None:
                    continue

                groups = setup.findall("group")
                if len(groups) == 0:
                    continue

                setup_groups[modname] = groups

            # Read all plugin's
            plugins = tree.findall("plugin")

            for plugin in plugins:
                try:
                    modname = plugin.find('extends').text.strip()
                except:
                    raise Exception("%s: plugin does not define 'extends'" % f)

                if modname.find('\n') >= 0:
                    raise Exception("%s: wrong module name in plugin." \
                                    "extends: no newlines allowed" % f)

                if not modname:
                    sys.stderr.write("%s: skipping module without name\n" % f)
                    continue

                setup = plugin.find("setup")
                if setup is None:
                    continue

                groups = setup.findall("group")
                if len(groups) == 0:
                    continue

                # Plugin groups extend the groups of the module they extend
                if modname in setup_groups:
                    setup_groups[modname] += groups
                else:
                    setup_groups[modname] = groups

        for name, groups in sorted(setup_groups.items()):
            self.addGroups(self.setupTree, name, groups)

        # Always descend to the first child (if available)
        self.setupTree.activeChild = self.setupTree.child
        self.currentNode = self.setupTree.activeChild

        sys.stdout.write('''
====================================================================
SeisComP setup
====================================================================

This initializes the configuration of your installation.
If you already made adjustments to the configuration files
be warned that this setup will overwrite existing parameters
with default values. This is not a configurator for all
options of your setup but helps to setup initial standard values.

--------------------------------------------------------------------
Hint: Entered values starting with a dot (.) are handled
as commands. Available commands are:

quit: Quit setup without modification to your configuration.
back: Go back to the previous parameter.
help: Show help about the current parameter (if available).

If you need to enter a value with a leading dot, escape it
with backslash, e.g. "\\.value".
--------------------------------------------------------------------

''')

        try:
            self.fillTree()
        except StopIteration:
            # fillTree signals a user abort with StopIteration
            raise Exception("aborted by user")

        cfg = config.Config()
        dumpTree(cfg, self.setupTree)

        return cfg

    def addGroups(self, node, modname, groups):
        """Append the inputs of all *groups* of module *modname* below *node*."""
        for g in groups:
            self.addInputs(None, node, modname, g.get(
                'name'), g, g.get('name', "") + ".")

    def addInputs(self, obj, parent, modname, group, xml, prefix):
        """Recursively convert <input> elements of *xml* into SetupNodes.

        obj    -- Option the inputs belong to, or None at group level
        parent -- SetupNode to attach new nodes to
        prefix -- dotted path prefix for the resulting parameter names
        """
        # Find the tail of parent's child list so new nodes are appended
        childs = parent.child;
        if not childs is None:
            while not childs.next is None:
                childs = childs.next

        inputs = xml.findall("input")
        for inp in inputs:
            name = inp.get('name')
            if not name:
                raise Exception("%s: no name defined" % prefix)

            input_ = Input(name, inp.get('type'), inp.get('default'))
            try:
                input_.text = oneliner(inp.find('text').text)
            except Exception:
                # No <text> element: fall back to the parameter name
                input_.text = input_.name

            try:
                input_.desc = block(inp.find('description').text)
            except Exception:
                pass

            input_.echo = inp.get('echo')

            if obj:
                obj.inputs.append(input_)

            opts = inp.findall("option")

            node = SetupNode(parent, input_)
            node.path = prefix + input_.name
            node.value = input_.default_value
            node.modname = modname
            node.groupname = group
            node.isOption = len(opts) > 0

            # Append the new node to the sibling chain
            if childs is None:
                childs = node
                parent.child = childs
            else:
                childs.next = node
                childs = childs.next;

            options = node.child

            for opt in opts:
                value = opt.get('value')
                if not value:
                    raise Exception("%s: option without value" % prefix)

                optionNode = SetupNode(node, input_)
                optionNode.path = node.path + "." + value
                optionNode.modname = modname
                optionNode.groupname = group
                optionNode.isOption = False
                optionNode.optionValue = value

                option = Option(value)
                try:
                    option.desc = block(opt.find('description').text, 74)
                except Exception:
                    pass
                input_.options.append(option)

                # Append the option node to this input's child chain
                if options is None:
                    options = optionNode
                    node.child = options
                else:
                    options.next = optionNode
                    options = options.next

                # An option may expose further nested inputs
                self.addInputs(option, optionNode, modname,
                               group, opt, node.path + ".")

    def fillTree(self):
        """Interactive question loop: walk the setup tree and store the
        entered values in the nodes.

        Raises StopIteration when the user quits without saving.
        """
        while True:
            # End of tree reached: offer the final menu
            if not self.currentNode:
                sys.stdout.write("\nFinished setup\n--------------\n\n")
                sys.stdout.write("P) Proceed to apply configuration\n")
                sys.stdout.write("D) Dump entered parameters\n")
                sys.stdout.write("B) Back to last parameter\n")
                sys.stdout.write("Q) Quit without changes\n")

                value = py3input('Command? [P]: ').upper()
                if value == "Q":
                    raise StopIteration()
                if value == "D":
                    sys.stdout.write("\n----\n")
                    cfg = config.Config()
                    dumpTree(cfg, self.setupTree)
                    cfg.writeConfig("-")
                    sys.stdout.write("----\n\n")
                    continue
                if value == "P" or not value:
                    sys.stdout.write("\nRunning setup\n-------------\n\n")
                    return
                if value == "B":
                    self.prevStep()
                    continue

                sys.stdout.write("\nEnter either p, b or q\n")
                continue

            # Structural nodes without an input are skipped silently
            if not self.currentNode.input:
                self.nextStep()
                continue

            default_value = self.valueToString(self.currentNode)

            isChoice = False
            isPassword = False
            if self.currentNode.input.echo == "password":
                isPassword = True

            node_text = default_value
            prompt = self.currentNode.input.text

            if isPassword:
                # Never display the stored password in the prompt
                node_text = '*' * len(node_text)
                prompt += " (input not echoed)"

            # Non-boolean inputs with options are presented as a numbered menu
            if (not self.currentNode.input.type or \
                    self.currentNode.input.type != "boolean") and \
                    len(self.currentNode.input.options) > 0:
                idx = 0
                def_idx = 0
                for opt in self.currentNode.input.options:
                    sys.stdout.write("%2d) %s\n" % (idx, opt.value))
                    if opt.desc:
                        for l in opt.desc:
                            sys.stdout.write("     %s\n" % l)
                    if default_value == opt.value:
                        def_idx = idx
                    idx += 1
                isChoice = True
                prompt += " [%d]: " % def_idx
            else:
                prompt += " [%s]: " % node_text

            if self.currentNode.input.echo == "password":
                value = getpass.getpass(prompt)
            else:
                value = py3input(prompt)

            if not value:
                # Empty input keeps the default
                value = default_value
            elif value == ".help":
                if self.currentNode.input.desc:
                    sys.stdout.write("\n%s\n\n" %
                                     "\n".join(self.currentNode.input.desc))
                else:
                    sys.stdout.write("\nSorry, no help available.\n\n")
                continue
            elif value == ".back":
                self.prevStep()
                continue
            elif value == ".quit":
                raise StopIteration()
            elif value.startswith("."):
                sys.stdout.write("Unknown command. Values starting with '.' are handled has commands such as\n"
                                 "'.help', '.quit' or '.back'. To use a leading dot in a value, escape it with '\'\n"
                                 "e.g. '\\.color'\n")
                continue
            else:
                # Replace leading \. with .
                if value.startswith('\\.'):
                    value = value[1:]

            if isChoice:
                # Map the entered menu index back to the option value
                try:
                    idx = int(value)
                except ValueError:
                    idx = -1
                if idx < 0 or idx >= len(self.currentNode.input.options):
                    sys.stdout.write("\nEnter a number between 0 and %d\n\n" % (
                        len(self.currentNode.input.options)-1))
                    continue
                value = self.currentNode.input.options[idx].value

            if self.currentNode.input.type and self.currentNode.input.type == "boolean":
                if not value in ["yes", "no"]:
                    sys.stdout.write("Please enter 'yes' or 'no'\n")
                    continue

                # Booleans are stored as "true"/"false" in the config
                if value == "yes":
                    value = "true"
                else:
                    value = "false"

            self.currentNode.value = value
            self.nextStep()

    @staticmethod
    def valueToString(node):
        """Return node.value rendered for display: booleans become
        "yes"/"no" (defaulting to "yes"), None becomes ""."""
        if not node.input.type:
            if node.value is None:
                return ""
            return node.value

        if node.input.type == "boolean":
            if node.value == "true":
                return "yes"
            if node.value == "false":
                return "no"
            return "yes"

        if node.value is None:
            return ""
        return node.value

    def prevStep(self):
        """Go back to the most recently answered node, if any."""
        if len(self.paths) == 0:
            sys.stdout.write("No previous step available\n")
            return

        self.currentNode = self.paths.pop()

    def nextStep(self):
        """Advance currentNode: descend into the chosen option branch if
        one matches, otherwise move to the next sibling, climbing back up
        through parents when a chain is exhausted."""
        self.currentNode.activeChild = None
        self.paths.append(self.currentNode)

        # Choice?
        if self.currentNode.isOption:
            child = self.currentNode.child
            while not child is None:
                if child.optionValue == self.currentNode.value:
                    # Descend only if the chosen option has nested inputs
                    if not child.child is None:
                        self.currentNode.activeChild = child
                        self.currentNode = child.child
                        return

                    break
                child = child.next

        # NOTE: local 'next' shadows the builtin; kept as in the original.
        next = self.currentNode.next
        while next is None and not self.currentNode.parent is None:
            self.currentNode = self.currentNode.parent
            # Option nodes themselves are not questions; keep climbing
            if not self.currentNode.optionValue is None:
                continue
            next = self.currentNode.next

        self.currentNode = next
|
645
lib/python/seiscomp/shell.py
Normal file
645
lib/python/seiscomp/shell.py
Normal file
@ -0,0 +1,645 @@
|
||||
############################################################################
|
||||
# Copyright (C) by gempa GmbH, GFZ Potsdam #
|
||||
# #
|
||||
# You can redistribute and/or modify this program under the #
|
||||
# terms of the SeisComP Public License. #
|
||||
# #
|
||||
# This program is distributed in the hope that it will be useful, #
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
|
||||
# SeisComP Public License for more details. #
|
||||
############################################################################
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
import glob
|
||||
|
||||
# Python version dependent input function: Python 2's raw_input() returns
# the entered line as str without evaluating it, matching Python 3's input().
if sys.version_info[0] < 3:
    py3input = raw_input #pylint: disable=E0602
else:
    py3input = input
|
||||
|
||||
|
||||
def split_tokens(line):
    """Split a shell command line into whitespace-separated tokens."""
    tokens = line.split()
    return tokens
|
||||
|
||||
|
||||
def convert_wildcard(s):
    """Convert a NET.STA selector into a key-file glob pattern.

    A bare network ("GE") gets an implicit station wildcard; empty
    components become '*'. The dot separator is replaced by an
    underscore to match the on-disk "station_NET_STA" naming.

    Raises Exception when more than one dot is present.
    """
    parts = s.split(".")
    if len(parts) > 2:
        raise Exception("station selector: only one dot allowed")

    # Add station wildcard if only network is given
    if len(parts) == 1:
        parts.append("*")

    return "_".join(p if p else "*" for p in parts)
|
||||
|
||||
|
||||
def convert_stations(s):
    """Convert a "NET.STA" station code into the "NET_STA" key-file form.

    Raises Exception unless exactly one dot separates the two parts.
    """
    pieces = s.split(".")
    if len(pieces) != 2:
        raise Exception("station: expected format: NET.STA")
    net, sta = pieces
    return "%s_%s" % (net, sta)
|
||||
|
||||
|
||||
class CLI:
    """
    Simple console shell.

    Interactive front-end for managing SeisComP key files below
    self.env.key_dir: station key files ("station_NET_STA") and module
    binding profiles ("<mod>/profile_<name>").
    """

    def __init__(self, env = None):
        # env: kernel environment providing key_dir; may also be passed to run()
        self.env = env

    def run(self, env):
        """Read-eval loop: prompt for commands until 'exit' or 'quit'."""
        self.env = env

        sys.stdout.write('''\
%s
SeisComP shell
%s

Welcome to the SeisComP interactive shell. You can get help about
available commands with 'help'. 'exit' leaves the shell.

''' % (("="*80), ("="*80)))

        prompt = "$ "
        while True:
            line = py3input(prompt).strip()
            toks = split_tokens(line)
            if len(toks) == 0:
                continue

            if line in ("exit", "quit"):
                break

            self.handleCommand(toks[0], toks[1:])

    def handleCommand(self, cmd, args):
        """Dispatch one command; errors are printed, not raised.

        Returns the handler's result, or False when an exception was
        caught and reported.
        """
        try:
            if cmd == "help":
                return self.commandHelp(args)
            if cmd == "list":
                return self.commandList(args)
            if cmd == "delete":
                return self.commandDelete(args)
            if cmd == "print":
                return self.commandPrint(args)
            if cmd == "set":
                return self.commandSet(args)
            if cmd == "remove":
                return self.commandRemove(args)

            raise Exception("Unknown command: %s" % cmd)
        except Exception as e:
            sys.stdout.write("%s\n" % str(e))
            return False

    @staticmethod
    def commandHelp(args):
        """Print the command overview.

        NOTE(review): when args is non-empty nothing is printed and None
        is returned; per-command help is not implemented here.
        """
        if len(args) == 0:
            sys.stdout.write("""\
Commands:
list stations
Lists all available stations keys.

list profiles {mod}
Lists all available profiles of a module.

list modules {sta}
Lists all bound modules of a station incl. profiles (if used).

delete profile {mod} {profile}
Deletes the given profile of given module. If the profile does not exist an
error is raised.
The module is removed from all stations that are using this profile.

delete binding {mod} {sta}
Deletes the binding for given module and station. If the station is bound
to module mod using a profile the binding is kept, removed otherwise.
An existing binding file (etc/key/[mod]/station_[sta]) is deleted in any
case.

print station {sta}
Dumps all set binding parameters for the given station.

set profile {mod} {profile} {sta-sel}
Sets for all selected stations a binding profile of a module.
The resulting station file looks like this:
...
mod:profile
...

This command checks for the existence of the specified profile

set module {mod} {sta-sel}
Binds all selected stations to given module. No profiles are used
and if any of the stations is already using a profile it is removed.
The resulting station key file looks like this:
...
mod
...

remove profile {mod} {profile} {sta-sel}
Removes the binding profile of given module for all selected stations if
module is bound already to that station.
As a result all selected stations that are bound to the given module already
will use a station key file afterwards.

mod:profile -> mod

remove module {mod} {sta-sel}
Unbinds given module from selected stations. The line that refers to the
given module is completely removed from the station key files.

exit
Exit the shell.

quit
Alias for exit.
""")

    def commandList(self, args):
        """Handle 'list stations|profiles|modules'. Returns True on success."""
        if len(args) == 0:
            raise Exception("Missing operand")

        if args[0] == "stations":
            if len(args) > 2:
                raise Exception("Too many arguments")

            # Optional station selector narrows the listing
            if len(args) > 1:
                wild = convert_wildcard(args[1])
            else:
                wild = "*"

            stas = []
            for f in sorted(glob.glob(os.path.join(self.env.key_dir, "station_" + wild))):
                # "station_NET_STA" -> "NET.STA"
                stas.append(os.path.basename(f)[8:].replace("_", "."))

            for s in stas:
                print(s)

            return True

        if args[0] == "profiles":
            if len(args) > 2:
                raise Exception("Too many arguments")
            if len(args) < 2:
                raise Exception("Expected: mod")

            module = args[1]

            for f in sorted(glob.glob(os.path.join(self.env.key_dir, module, "profile_*"))):
                # Strip the "profile_" prefix
                print(os.path.basename(f)[8:])

            return True

        if args[0] == "modules":
            if len(args) > 2:
                raise Exception("Too many arguments")
            if len(args) < 2:
                raise Exception("Expected: sta")

            sta = convert_stations(args[1])

            f = os.path.join(self.env.key_dir, "station_" + sta)
            if not os.path.exists(f):
                raise Exception("%s: station key does not exists" % args[1])

            # Print all non-comment, non-empty binding lines
            for l in [line.strip() for line in open(f, "r").readlines()]:
                if l.startswith("#"):
                    continue
                if len(l) == 0:
                    continue
                print(l)

            return True

        raise Exception("Invalid argument: %s" % args[0])

    def commandDelete(self, args):
        """Handle 'delete profile|binding'. Returns True on success."""
        if len(args) == 0:
            raise Exception("Missing operand")

        if args[0] == "profile":
            if len(args) > 3:
                raise Exception("Too many arguments")
            if len(args) < 3:
                raise Exception("Expected: mod profile")

            module = args[1]
            profile = args[2]

            if not os.path.exists(os.path.join(self.env.key_dir, module, "profile_" + profile)):
                raise Exception("%s/%s: profile not found" % (module, profile))

            os.remove(os.path.join(self.env.key_dir,
                                   module, "profile_" + profile))

            # Remove "module:profile" lines from all station key files
            modified = 0
            for f in glob.glob(os.path.join(self.env.key_dir, "station_*")):
                lines = [line.strip() for line in open(f, "r").readlines()]

                new_lines = []
                is_modified = False

                for line in lines:
                    # Comment line
                    if line.startswith("#"):
                        new_lines.append(line)
                        continue

                    # Empty line
                    if not line:
                        new_lines.append(line)
                        continue

                    toks = line.split(':')

                    # Wrong module name
                    if toks[0] != module:
                        new_lines.append(line)
                        continue

                    # Profile found
                    if len(toks) > 1 and toks[1] == profile:
                        # Filter line
                        is_modified = True
                        continue

                    new_lines.append(line)

                if is_modified:
                    modified += 1
                    try:
                        open(f, "w").write('\n'.join(new_lines))
                    except Exception as e:
                        sys.stdout.write("%s: %s\n" % (f, str(e)))

            sys.stdout.write("OK, %d files modified\n" % modified)

            return True

        if args[0] == "binding":
            if len(args) > 3:
                raise Exception("Too many arguments")
            if len(args) < 3:
                raise Exception("Expected: mod profile")

            module = args[1]
            sta = convert_stations(args[2])

            if not os.path.exists(os.path.join(self.env.key_dir, module, "station_" + sta)):
                raise Exception("%s/%s: binding not found" % (module, args[2]))

            os.remove(os.path.join(self.env.key_dir, module, "station_" + sta))

            f = os.path.join(self.env.key_dir, "station_" + sta)
            try:
                lines = [line.strip() for line in open(f, "r").readlines()]
            except OSError:
                # NOTE(review): if the open fails, 'lines' stays undefined
                # and the loop below raises NameError — likely a latent bug.
                pass

            new_lines = []
            is_modified = False

            for line in lines:
                # Comment line
                if line.startswith("#"):
                    new_lines.append(line)
                    continue

                # Empty line
                if len(line) == 0:
                    new_lines.append(line)
                    continue

                toks = line.split(':')

                # Wrong module name
                if toks[0] != module:
                    new_lines.append(line)
                    continue

                # Profile found
                if len(toks) == 1:
                    # Filter line
                    is_modified = True
                    continue

                new_lines.append(line)

            if is_modified:
                try:
                    open(f, "w").write('\n'.join(new_lines))
                except Exception as e:
                    sys.stdout.write("%s: %s\n" % (f, str(e)))

            return True

        raise Exception("Invalid argument: %s" % args[0])

    def commandPrint(self, args):
        """Handle 'print station {sta}': dump each bound module's binding
        or profile file contents."""
        if len(args) == 0:
            raise Exception("Missing operand")

        if args[0] == "station":
            if len(args) != 2:
                raise Exception("missing argument, expected: sta")

            sta = convert_stations(args[1])
            key = os.path.join(self.env.key_dir, "station_" + sta)
            try:
                lines = [line.strip() for line in open(key, "r").readlines()]
            except IOError as e:
                raise Exception("%s: station not configured" % sta)
            except Exception as e:
                raise Exception("%s: unexpected error: %s" % (sta, str(e)))

            first = True

            for line in lines:
                # Comment line
                if line.startswith("#"):
                    continue
                # Empty line
                if len(line) == 0:
                    continue

                toks = line.split(':')

                # "mod" -> per-station binding file, "mod:profile" -> profile file
                if len(toks) == 1:
                    binding = os.path.join(
                        self.env.key_dir, toks[0], "station_" + sta)
                else:
                    binding = os.path.join(
                        self.env.key_dir, toks[0], "profile_" + toks[1])

                if not first:
                    sys.stdout.write("\n")

                first = False
                sys.stdout.write("[%s]\n" % toks[0])
                sys.stdout.write("%s\n" % binding)
                try:
                    data = open(binding).read()
                    sys.stdout.write("-"*80 + "\n")
                    sys.stdout.write(data)
                    sys.stdout.write("-"*80 + "\n")
                except IOError as e:
                    sys.stdout.write("!binding not found\n")
                except Exception as e:
                    sys.stdout.write("!unexpected error: %s\n" % str(e))

        else:
            raise Exception("Invalid argument: %s" % args[0])

    def commandSet(self, args):
        """Handle 'set profile|module'. Returns True on success."""
        if len(args) == 0:
            raise Exception("Missing operand")

        if args[0] == "profile":
            if len(args) != 4:
                raise Exception(
                    "missing arguments, expected: module profile station-selector")

            module = args[1]
            profile = args[2]

            wild = convert_wildcard(args[3])

            if not os.path.exists(os.path.join(self.env.key_dir, module, "profile_" + profile)):
                raise Exception("%s/%s: profile not found" % (module, profile))

            modified = 0
            for f in glob.glob(os.path.join(self.env.key_dir, "station_" + wild)):
                lines = [line.strip() for line in open(f, "r").readlines()]

                module_found = False
                is_modified = False

                for i in range(len(lines)): #pylint: disable=C0200
                    line = lines[i]

                    # Comment line
                    if line.startswith("#"):
                        continue
                    # Empty line
                    if len(line) == 0:
                        continue

                    toks = line.split(':')

                    # Wrong module name
                    if toks[0] != module:
                        continue

                    module_found = True

                    # No profile
                    if len(toks) == 1:
                        toks.append("")
                    # Profile already set
                    elif toks[1] == profile:
                        continue

                    toks[1] = profile
                    lines[i] = ':'.join(toks)

                    is_modified = True

                if not module_found:
                    # NOTE(review): the trailing "\n" here plus the later
                    # '\n'.join produces a double newline in the file.
                    lines.append("%s:%s\n" % (module, profile))
                    is_modified = True

                if is_modified:
                    modified += 1
                    try:
                        open(f, "w").write('\n'.join(lines))
                    except Exception as e:
                        sys.stdout.write("%s: %s\n" % (f, str(e)))

            sys.stdout.write("OK, %d files modified\n" % modified)

            return True

        if args[0] == "module":
            if len(args) != 3:
                raise Exception(
                    "missing arguments, expected: module station-selector")

            module = args[1]

            wild = convert_wildcard(args[2])

            modified = 0
            for f in glob.glob(os.path.join(self.env.key_dir, "station_" + wild)):
                lines = [line.strip() for line in open(f, "r").readlines()]

                module_found = False
                is_modified = False

                for i in range(len(lines)): #pylint: disable=C0200
                    line = lines[i]

                    # Comment line
                    if line.startswith("#"):
                        continue
                    # Empty line
                    if len(line) == 0:
                        continue

                    toks = line.split(':')

                    # Wrong module name
                    if toks[0] != module:
                        continue

                    module_found = True

                    # Drop any profile: keep the bare module name
                    lines[i] = module

                    is_modified = True

                if not module_found:
                    lines.append("%s\n" % module)
                    is_modified = True

                if is_modified:
                    modified += 1
                    try:
                        open(f, "w").write('\n'.join(lines))
                    except Exception as e:
                        sys.stdout.write("%s: %s\n" % (f, str(e)))

            sys.stdout.write("OK, %d files modified\n" % modified)

            return True

        raise Exception("Invalid argument: %s" % args[0])

    def commandRemove(self, args):
        """Handle 'remove profile|module'. Returns True on success."""
        if len(args) == 0:
            raise Exception("Missing operand")

        if args[0] == "profile":
            if len(args) != 4:
                raise Exception(
                    "Missing arguments, expected: module profile station-selector")

            module = args[1]
            profile = args[2]

            wild = convert_wildcard(args[3])

            modified = 0
            for f in glob.glob(os.path.join(self.env.key_dir, "station_" + wild)):
                lines = [line.strip() for line in open(f, "r").readlines()]

                is_modified = False
                for i in range(len(lines)): #pylint: disable=C0200
                    line = lines[i]

                    # Comment line
                    if line.startswith("#"):
                        continue

                    # Empty line
                    if len(line) == 0:
                        continue

                    toks = line.split(':')

                    # No profile
                    if len(toks) == 1:
                        continue

                    # Wrong module name
                    if toks[0] != module:
                        continue

                    # Wrong profile name
                    if toks[1] != profile:
                        continue

                    # "mod:profile" -> "mod"
                    lines[i] = module
                    is_modified = True

                    continue

                if is_modified:
                    modified += 1

                    # Keep a trailing newline at end of file
                    if (len(lines) > 0) and (len(lines[-1]) > 0):
                        lines.append("")

                    try:
                        open(f, "w").write('\n'.join(lines))
                    except Exception as e:
                        sys.stdout.write("%s: %s\n" % (f, str(e)))

            sys.stdout.write("OK, %d files modified\n" % modified)

            return True

        if args[0] == "module":
            if len(args) != 3:
                raise Exception(
                    "Missing arguments, expected: module station-selector")

            module = args[1]

            wild = convert_wildcard(args[2])

            modified = 0
            for f in glob.glob(os.path.join(self.env.key_dir, "station_" + wild)):
                lines = [line.strip() for line in open(f, "r").readlines()]
                new_lines = []

                is_modified = False
                for line in lines:
                    # Comment line
                    if line.startswith("#"):
                        new_lines.append(line)
                        continue

                    # Empty line
                    if len(line) == 0:
                        new_lines.append(line)
                        continue

                    toks = line.split(':')

                    # Wrong module name
                    if toks[0] != module:
                        new_lines.append(line)
                        continue

                    # Filter line
                    is_modified = True

                if is_modified:
                    modified += 1
                    # Keep a trailing newline at end of file
                    if (len(new_lines) > 0) and (len(new_lines[-1]) > 0):
                        new_lines.append("")

                    try:
                        open(f, "w").write('\n'.join(new_lines))
                    except Exception as e:
                        sys.stdout.write("%s: %s\n" % (f, str(e)))

                    # Also remove the module's per-station binding file
                    try:
                        os.remove(os.path.join(self.env.key_dir,
                                               module, os.path.basename(f)))
                    except OSError:
                        pass

            sys.stdout.write("OK, %d files modified\n" % modified)

            return True

        raise Exception("Invalid argument: %s" % args[0])
|
260
lib/python/seiscomp/slclient.py
Normal file
260
lib/python/seiscomp/slclient.py
Normal file
@ -0,0 +1,260 @@
|
||||
import os, sys, tempfile
|
||||
import datetime, time, re
|
||||
from seiscomp import mseedlite as mseed
|
||||
|
||||
def _timeparse(t, format):
|
||||
"""Parse a time string that might contain fractions of a second.
|
||||
|
||||
Fractional seconds are supported using a fragile, miserable hack.
|
||||
Given a time string like '02:03:04.234234' and a format string of
|
||||
'%H:%M:%S', time.strptime() will raise a ValueError with this
|
||||
message: 'unconverted data remains: .234234'. If %S is in the
|
||||
format string and the ValueError matches as above, a datetime
|
||||
object will be created from the part that matches and the
|
||||
microseconds in the time string.
|
||||
"""
|
||||
try:
|
||||
return datetime.datetime(*time.strptime(t, format)[0:6]).time()
|
||||
except ValueError as msg:
|
||||
if "%S" in format:
|
||||
msg = str(msg)
|
||||
mat = re.match(r"unconverted data remains:"
|
||||
" \.([0-9]{1,6})$", msg)
|
||||
if mat is not None:
|
||||
# fractional seconds are present - this is the style
|
||||
# used by datetime's isoformat() method
|
||||
frac = "." + mat.group(1)
|
||||
t = t[:-len(frac)]
|
||||
t = datetime.datetime(*time.strptime(t, format)[0:6])
|
||||
microsecond = int(float(frac)*1e6)
|
||||
return t.replace(microsecond=microsecond)
|
||||
else:
|
||||
mat = re.match(r"unconverted data remains:"
|
||||
" \,([0-9]{3,3})$", msg)
|
||||
if mat is not None:
|
||||
# fractional seconds are present - this is the style
|
||||
# used by the logging module
|
||||
frac = "." + mat.group(1)
|
||||
t = t[:-len(frac)]
|
||||
t = datetime.datetime(*time.strptime(t, format)[0:6])
|
||||
microsecond = int(float(frac)*1e6)
|
||||
return t.replace(microsecond=microsecond)
|
||||
|
||||
raise
|
||||
|
||||
def timeparse(t):
    """Parse a 'YYYY/MM/DD HH:MM:SS[.ffffff]' string via _timeparse."""
    return _timeparse(t, "%Y/%m/%d %H:%M:%S")
|
||||
|
||||
|
||||
class Input(mseed.Input):
    """MiniSEED record input streamed from a SeedLink server.

    Spawns 'slinktool' as a sub-process with a generated stream list
    file and feeds its stdout into the mseed.Input record parser.
    """

    def __init__(self, server, streams,
                 stime=None, etime=None, timeout=None, verbose=0):

        # XXX Add the possibility for supplying stime and etime as
        # individual times for each stream.

        """
        'streams' must be a list containing tuples of (net,sta,loc,cha)
        """

        import subprocess

        # Build slinktool stream-list lines: "NET STA LOCCHA.D"
        streams = ["%-3s %5s %s%3s.D" % s for s in streams]
        streams.sort()

        # Stream list is passed to slinktool via a temporary file;
        # keep a reference so it lives as long as this object.
        self.tmp = tempfile.NamedTemporaryFile(mode="w", prefix="slinktool.")
        self.tmp.write("\n".join(streams) + "\n")
        self.tmp.flush()
        if verbose:
            sys.stderr.write("\n".join(streams) + "\n")

        # Allow overriding the slinktool binary via the environment
        slinktool = os.getenv("SLINKTOOL")
        if not slinktool:
            slinktool = "slinktool"
        args = [slinktool, "-l", self.tmp.name, "-o", "-"]
        if stime:
            # Time window: "-tw start[:end]" with comma-separated fields
            args.append("-tw")
            tw = "%d,%d,%d,%d,%d,%d:" % (stime.year, stime.month, stime.day,
                                         stime.hour, stime.minute, stime.second)
            if etime:
                # Fix: original wrote to undefined 'rw', raising NameError
                # whenever an end time was supplied.
                tw += "%d,%d,%d,%d,%d,%d" % (etime.year, etime.month, etime.day,
                                             etime.hour, etime.minute, etime.second)
            args.append(tw)
        if verbose:
            args.append("-v")

        if timeout:
            # Validate explicitly instead of 'assert' (stripped under -O)
            try:
                if int(timeout) <= 0:
                    raise ValueError
            except (ValueError, TypeError):
                raise TypeError("illegal timeout parameter")
            args += ["-nt", "%d" % int(timeout)]

        args.append(server)
        # start 'slinktool' as sub-process
        self.popen = subprocess.Popen(args, stdout=subprocess.PIPE, shell=False)
        infile = self.popen.stdout

        mseed.Input.__init__(self, infile)

    def __del__(self):
        """
        Shut down SeedLink connections and close input.
        """
        sys.stderr.write("shutting down slinktool\n")
        sys.stderr.flush()

        # It would of course be much better to send SIGTERM,
        # but somehow slinktool often appears to ignore it.
        # XXX Need to figure out why, and perhaps fix it (not critical).
        self.popen.kill()
        self.popen.communicate()
        # mseed.Input.__del__(self) # closes the input file
|
||||
|
||||
|
||||
|
||||
class Input2(mseed.Input):
    # NOTE(review): legacy variant of Input that builds a shell command
    # string and runs it through os.popen(); the docstring below was
    # already marked outdated by the original author.

    def __init__(self, server, streams, stime=None, etime=None, verbose=0):

        """
        XXX information not uptodate!!! XXX

        'streams' must be a dict containing tuples of (stime, etime),
        with the key being the stream_id and stime and etime being
        the starting and end time of the time window, respectively.
        The times must be seis.Time objects. For instance

        stime = seis.Time(...)
        etime = seis.Time(...)
        streams["GE.KBS.00.BHZ.D"] = (stime, etime)

        It is more efficient to request the same time interval for
        all streams. Wildcards for the channels are allowed. If
        stime is None, only new data are retrieved as they come in.
        """

        # One "NET   STA  LOCCHA.D" selector line per "net.sta.loc.cha[.type]"
        # stream id; extra id components beyond the first four are ignored.
        streams = [ "%-3s %5s %s%3s.D" % tuple(s.split(".")[:4])
                    for s in streams ]
        streams.sort()

        # slinktool reads the stream list from this temporary file (-l).
        self.tmp = tempfile.NamedTemporaryFile(mode="w", prefix="slinktool.")
        self.tmp.write("\n".join(streams)+"\n")
        sys.stderr.write("\n".join(streams)+"\n")
        self.tmp.flush()

        # NOTE(review): command assembled as a shell string; 'server' is
        # interpolated unescaped — shell-injection risk if it can ever
        # come from untrusted input.
        cmd = "slinktool -l %s -o -" % self.tmp.name
        if stime:
            assert isinstance(stime, seis.Time)
            # assumes seis.Time.asDate is a 6-tuple (y, m, d, H, M, S) —
            # TODO confirm against the seis module.
            cmd += " -tw %d,%d,%d,%d,%d,%d:" % stime.asDate
            if etime:
                assert isinstance(etime, seis.Time)
                cmd += "%d,%d,%d,%d,%d,%d" % etime.asDate
        cmd = cmd + "%s '%s'" % (verbose*" -v", server)

        infile = os.popen(cmd)

        mseed.Input.__init__(self, infile)
|
||||
|
||||
|
||||
def available(server="localhost:18000",
              time_window=None, stream_ids=None, verbose=0):
    """
    Connects to server and returns a dictionary of lists of available
    time windows as tuples of (start_time, end_time) for each available
    stream. The stream set can be limited by specifying a list of
    stream_ids in the usual format, i.e. net.sta.loc.cha.type,
    e.g. "GE.KBS.00.BHZ.D".
    Note that often the returned lists contain only one time tuple,
    corresponding to one contiguous time window available.

    NEW:
    The search for available data can be limited to a time window by
    specifying the "time_window" parameter, which must be a tuple
    containing the starting and end time as seis.Time objects.
    """
    import re

    if time_window:
        stime, etime = time_window
        assert stime <= etime
    else:
        stime, etime = None, None

    cmd = "slinktool -Q %s %s " % (verbose*"-v ", server)
    infile = os.popen(cmd)
    windows = {}

    # Parse the output of "slinktool -Q".
    # It is assumed that the lines consist of the fields
    #   net, sta, [loc,] cha, type, date1, time1, "-", date2, time2
    # Since the location code (loc) may or may not be present, we
    # determine the position of the dash "-" to find where the other
    # fields are.
    # FIX: raw string literal — "\ " inside a normal string is an
    # invalid escape sequence (SyntaxWarning as of Python 3.12).
    regex = re.compile(r"^[A-Z][A-Z] [A-Z].*[12][0-9]{3}(/[0-9]{2}){2}.*$")
    for line in infile:
        if not regex.match(line):
            if verbose:
                # probably some diagnostic output
                sys.stdout.write("%s\n" % line.strip())
            continue

        # line containing a time window, a bit crude
        line = line.split()
        try:
            dash = line.index("-")
        except ValueError:
            continue

        if dash == 7:
            # location code is present
            loc = line[2]
        else:
            loc = ""

        net, sta, cha, typ = line[0], line[1], line[dash-4], line[dash-3]

        stream_id = "%s.%s.%s.%s.%s" % (net, sta, loc, cha, typ)

        if stream_ids and stream_id not in stream_ids:
            continue

        t1 = seis.Time("%s %s" % (line[dash-2], line[dash-1]))
        t2 = seis.Time("%s %s" % (line[dash+1], line[dash+2]))

        # Skip, clip-left and clip-right against the requested window.
        if (stime and t2 < stime) or (etime and t1 > etime):
            continue  # non-overlapping time windows

        if stime and t1 < stime:
            t1 = stime
        if etime and t2 > etime:
            t2 = etime

        windows.setdefault(stream_id, []).append((t1, t2))

    return windows
|
||||
|
||||
|
||||
def server_version(host, port=18000):
    """
    Query a SeedLink server with "HELLO" and return its version string
    (e.g. "3.2"), or None if the server cannot be reached or does not
    identify itself as "SeedLink".
    """
    import socket

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.connect((host, port))
    except OSError:
        # narrow exception instead of a bare 'except:'; also make sure
        # the socket object is not leaked on failure
        s.close()
        return None
    try:
        # Python 3 sockets transmit bytes, not str — the original
        # s.send("HELLO\n") raised TypeError on every call.
        s.send(b"HELLO\n")
        data = s.recv(1024)
    finally:
        s.close()

    # Decode before comparing; bytes[:8] != "SeedLink" was always True.
    data = data.decode("ascii", "replace")
    if data[:8] != "SeedLink":
        return None

    # Version digits, e.g. "3.2" from "SeedLink v3.2 ...".
    return data[10:13]
|
||||
|
||||
|
||||
def server_running(host, port=18000):
    """Return True if a SeedLink server answers at host:port, else False."""
    # server_version() yields a non-empty version string on success and
    # None on failure; bool() keeps the original truthiness semantics.
    return bool(server_version(host, port))
|
2447
lib/python/seiscomp/system.py
Normal file
2447
lib/python/seiscomp/system.py
Normal file
File diff suppressed because it is too large
Load Diff
245
lib/python/seiscomp/utils.py
Normal file
245
lib/python/seiscomp/utils.py
Normal file
@ -0,0 +1,245 @@
|
||||
# This file was automatically generated by SWIG (http://www.swig.org).
|
||||
# Version 4.0.2
|
||||
#
|
||||
# Do not make changes to this file unless you know what you are doing--modify
|
||||
# the SWIG interface file instead.
|
||||
|
||||
from sys import version_info as _swig_python_version_info
|
||||
if _swig_python_version_info < (2, 7, 0):
|
||||
raise RuntimeError("Python 2.7 or later required")
|
||||
|
||||
# Import the low-level C/C++ module
|
||||
if __package__ or "." in __name__:
|
||||
from . import _utils
|
||||
else:
|
||||
import _utils
|
||||
|
||||
try:
|
||||
import builtins as __builtin__
|
||||
except ImportError:
|
||||
import __builtin__
|
||||
|
||||
def _swig_repr(self):
    # Human-readable repr for SWIG proxy objects; falls back to an empty
    # description when the underlying C++ pointer ('this') is unavailable.
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
|
||||
|
||||
|
||||
def _swig_setattr_nondynamic_instance_variable(set):
    # Wrap a __setattr__ ('set') so that instances only accept assignments
    # to 'this'/'thisown', existing properties, or already-known attributes;
    # any attempt to create a brand-new instance attribute raises.
    def set_instance_attr(self, name, value):
        if name == "thisown":
            # ownership flag lives on the underlying SWIG pointer object
            self.this.own(value)
        elif name == "this":
            set(self, name, value)
        elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
            set(self, name, value)
        else:
            raise AttributeError("You cannot add instance attributes to %s" % self)
    return set_instance_attr
|
||||
|
||||
|
||||
def _swig_setattr_nondynamic_class_variable(set):
    # Class-level counterpart of the instance wrapper above: only existing,
    # non-property class attributes may be reassigned; new ones raise.
    def set_class_attr(cls, name, value):
        if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
            set(cls, name, value)
        else:
            raise AttributeError("You cannot add class attributes to %s" % cls)
    return set_class_attr
|
||||
|
||||
|
||||
def _swig_add_metaclass(metaclass):
    """Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
    def wrapper(cls):
        # Rebuild the class under the given metaclass, preserving its
        # name, bases and class dict.
        return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
    return wrapper
|
||||
|
||||
|
||||
class _SwigNonDynamicMeta(type):
    """Meta class to enforce nondynamic attributes (no new attributes) for a class"""
    # Reject creation of new class-level attributes via the wrapper above.
    __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
|
||||
|
||||
|
||||
class SwigPyIterator(object):
    """SWIG-generated proxy for the C++ iterator interface of '_utils'.

    Abstract: instances are only produced by the C extension side;
    every method simply delegates to the corresponding _utils function.
    """
    # Ownership flag of the underlying C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _utils.delete_SwigPyIterator

    def value(self):
        return _utils.SwigPyIterator_value(self)

    def incr(self, n=1):
        return _utils.SwigPyIterator_incr(self, n)

    def decr(self, n=1):
        return _utils.SwigPyIterator_decr(self, n)

    def distance(self, x):
        return _utils.SwigPyIterator_distance(self, x)

    def equal(self, x):
        return _utils.SwigPyIterator_equal(self, x)

    def copy(self):
        return _utils.SwigPyIterator_copy(self)

    def next(self):
        # Python-2-era name kept alongside __next__ for compatibility.
        return _utils.SwigPyIterator_next(self)

    def __next__(self):
        return _utils.SwigPyIterator___next__(self)

    def previous(self):
        return _utils.SwigPyIterator_previous(self)

    def advance(self, n):
        return _utils.SwigPyIterator_advance(self, n)

    def __eq__(self, x):
        return _utils.SwigPyIterator___eq__(self, x)

    def __ne__(self, x):
        return _utils.SwigPyIterator___ne__(self, x)

    def __iadd__(self, n):
        return _utils.SwigPyIterator___iadd__(self, n)

    def __isub__(self, n):
        return _utils.SwigPyIterator___isub__(self, n)

    def __add__(self, n):
        return _utils.SwigPyIterator___add__(self, n)

    def __sub__(self, *args):
        return _utils.SwigPyIterator___sub__(self, *args)

    def __iter__(self):
        return self

# Register SwigPyIterator in _utils:
_utils.SwigPyIterator_swigregister(SwigPyIterator)
|
||||
|
||||
|
||||
# Thin module-level wrappers delegating to the SWIG-generated
# C extension module '_utils'.

def basename(name):
    return _utils.basename(name)


def fileExists(file):
    return _utils.fileExists(file)


def pathExists(path):
    return _utils.pathExists(path)


def createPath(path):
    return _utils.createPath(path)


def removeExtension(name):
    return _utils.removeExtension(name)


def bytesToStreambuf(data, n):
    return _utils.bytesToStreambuf(data, n)


def stringToStreambuf(str):
    return _utils.stringToStreambuf(str)


def file2ostream(fn):
    return _utils.file2ostream(fn)


def file2istream(fn):
    return _utils.file2istream(fn)
|
||||
class StopWatch(object):
    """SWIG proxy for the C++ StopWatch utility exposed by '_utils'."""
    # Ownership flag of the underlying C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _utils.StopWatch_swiginit(self, _utils.new_StopWatch(*args))

    def restart(self):
        return _utils.StopWatch_restart(self)

    def reset(self):
        return _utils.StopWatch_reset(self)

    def isActive(self):
        return _utils.StopWatch_isActive(self)

    def elapsed(self):
        return _utils.StopWatch_elapsed(self)
    __swig_destroy__ = _utils.delete_StopWatch

# Register StopWatch in _utils:
_utils.StopWatch_swigregister(StopWatch)
|
||||
|
||||
class Timer(object):
    """SWIG proxy for the C++ Timer utility in '_utils' (callback-based,
    with optional single-shot mode)."""
    # Ownership flag of the underlying C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, timeoutseconds=0):
        _utils.Timer_swiginit(self, _utils.new_Timer(timeoutseconds))
    __swig_destroy__ = _utils.delete_Timer

    def setTimeout(self, seconds):
        return _utils.Timer_setTimeout(self, seconds)

    def setTimeout2(self, seconds, nanoseconds):
        return _utils.Timer_setTimeout2(self, seconds, nanoseconds)

    def setCallback(self, arg2):
        return _utils.Timer_setCallback(self, arg2)

    def setSingleShot(self, arg2):
        return _utils.Timer_setSingleShot(self, arg2)

    def start(self):
        return _utils.Timer_start(self)

    def stop(self):
        return _utils.Timer_stop(self)

    def disable(self):
        return _utils.Timer_disable(self)

    def isActive(self):
        return _utils.Timer_isActive(self)

# Register Timer in _utils:
_utils.Timer_swigregister(Timer)
|
||||
|
||||
class UnitConversion(object):
    """SWIG proxy describing one unit-conversion entry (unit names plus a
    scale factor), backed by the C++ struct in '_utils'."""
    # Ownership flag of the underlying C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _utils.UnitConversion_swiginit(self, _utils.new_UnitConversion(*args))
    # Attribute accessors generated for the C++ data members.
    fromUnit = property(_utils.UnitConversion_fromUnit_get, _utils.UnitConversion_fromUnit_set)
    toUnit = property(_utils.UnitConversion_toUnit_get, _utils.UnitConversion_toUnit_set)
    toQMLUnit = property(_utils.UnitConversion_toQMLUnit_get, _utils.UnitConversion_toQMLUnit_set)
    toSEEDUnit = property(_utils.UnitConversion_toSEEDUnit_get, _utils.UnitConversion_toSEEDUnit_set)
    scale = property(_utils.UnitConversion_scale_get, _utils.UnitConversion_scale_set)
    __swig_destroy__ = _utils.delete_UnitConversion

# Register UnitConversion in _utils:
_utils.UnitConversion_swigregister(UnitConversion)
|
||||
|
||||
class UnitConverter(object):
    """SWIG proxy for the C++ UnitConverter; get(fromUnit) delegates the
    lookup to the C extension '_utils'."""
    # Ownership flag of the underlying C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    @staticmethod
    def get(fromUnit):
        return _utils.UnitConverter_get(fromUnit)

    def __init__(self):
        _utils.UnitConverter_swiginit(self, _utils.new_UnitConverter())
    __swig_destroy__ = _utils.delete_UnitConverter

# Register UnitConverter in _utils:
_utils.UnitConverter_swigregister(UnitConverter)
|
||||
|
||||
def UnitConverter_get(fromUnit):
    # Module-level counterpart of UnitConverter.get; delegates straight
    # to the C extension.
    return _utils.UnitConverter_get(fromUnit)
|
||||
|
||||
|
||||
|
Reference in New Issue
Block a user