[installation] Init with initial config for global

commit 7640b452ed
2025-10-30 15:08:17 +01:00
3678 changed files with 2200095 additions and 0 deletions

lib/libbson-1.0.so Symbolic link

@@ -0,0 +1 @@
libbson-1.0.so.0

lib/libbson-1.0.so.0 Symbolic link

@@ -0,0 +1 @@
libbson-1.0.so.0.0.0

lib/libbson-1.0.so.0.0.0 Normal file

Binary file not shown.

lib/libmseed.so Symbolic link

@@ -0,0 +1 @@
libmseed.so.2.19

lib/libmseed.so.2.19 Normal file

Binary file not shown.

lib/libseiscomp_broker.so Normal file

Binary file not shown.

lib/libseiscomp_client.so Symbolic link

@@ -0,0 +1 @@
libseiscomp_client.so.16

lib/libseiscomp_client.so.16 Symbolic link

@@ -0,0 +1 @@
libseiscomp_client.so.16.4.0

lib/libseiscomp_client.so.16.4.0 Normal file

Binary file not shown.

lib/libseiscomp_config.so Normal file

Binary file not shown.

lib/libseiscomp_core.so Symbolic link

@@ -0,0 +1 @@
libseiscomp_core.so.16

lib/libseiscomp_core.so.16 Symbolic link

@@ -0,0 +1 @@
libseiscomp_core.so.16.4.0

lib/libseiscomp_core.so.16.4.0 Normal file

Binary file not shown.

lib/libseiscomp_daplugin.so Normal file

Binary file not shown.

Binary file not shown.

lib/libseiscomp_evplugin.so Normal file

Binary file not shown.

Binary file not shown.

lib/libseiscomp_gempaasio.so Symbolic link

@@ -0,0 +1 @@
libseiscomp_gempaasio.so.3

lib/libseiscomp_gempaasio.so.3 Symbolic link

@@ -0,0 +1 @@
libseiscomp_gempaasio.so.3.0.0

lib/libseiscomp_gempaasio.so.3.0.0 Normal file

Binary file not shown.

lib/libseiscomp_gempagui.so Symbolic link

@@ -0,0 +1 @@
libseiscomp_gempagui.so.5

lib/libseiscomp_gempagui.so.5 Symbolic link

@@ -0,0 +1 @@
libseiscomp_gempagui.so.5.4.1

lib/libseiscomp_gempagui.so.5.4.1 Normal file

Binary file not shown.

lib/libseiscomp_gempautils.so Symbolic link

@@ -0,0 +1 @@
libseiscomp_gempautils.so.4

lib/libseiscomp_gempautils.so.4 Symbolic link

@@ -0,0 +1 @@
libseiscomp_gempautils.so.4.7.0

lib/libseiscomp_gempautils.so.4.7.0 Normal file

Binary file not shown.

lib/libseiscomp_mplugin.so Normal file

Binary file not shown.

lib/libseiscomp_qcplugin.so Normal file

Binary file not shown.

lib/libseiscomp_qt.so Symbolic link

@@ -0,0 +1 @@
libseiscomp_qt.so.16

lib/libseiscomp_qt.so.16 Symbolic link

@@ -0,0 +1 @@
libseiscomp_qt.so.16.4.0

lib/libseiscomp_qt.so.16.4.0 Normal file

Binary file not shown.

lib/libseiscomp_unittest.so Normal file

Binary file not shown.

lib/pkgconfig/libbson-1.0.pc Normal file

@@ -0,0 +1,10 @@
prefix=/home/sysop/gitlocal/bmp/6-release/seiscomp/build-gpkg/deploy/seiscomp
exec_prefix=${prefix}
libdir=${prefix}/lib
includedir=${exec_prefix}/include

Name: libbson
Description: The libbson BSON serialization library.
Version: 1.14.0
Libs: -L${libdir} -lbson-1.0
Cflags: -I${includedir}/libbson-1.0

lib/python/gempa/CAPS.py Normal file, 1409 lines

File diff suppressed because it is too large.

lib/python/gempa/CAPS.pyo Normal file

Binary file not shown.

lib/python/gempa/gProcessing.py Normal file

@@ -0,0 +1,154 @@
# This file was automatically generated by SWIG (https://www.swig.org).
# Version 4.3.0
#
# Do not make changes to this file unless you know what you are doing - modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
# Import the low-level C/C++ module
if __package__ or "." in __name__:
from . import _gProcessing
else:
import _gProcessing
try:
import builtins as __builtin__
except ImportError:
import __builtin__
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
def _swig_setattr_nondynamic_instance_variable(set):
def set_instance_attr(self, name, value):
if name == "this":
set(self, name, value)
elif name == "thisown":
self.this.own(value)
elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
set(self, name, value)
else:
raise AttributeError("You cannot add instance attributes to %s" % self)
return set_instance_attr
def _swig_setattr_nondynamic_class_variable(set):
def set_class_attr(cls, name, value):
if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
set(cls, name, value)
else:
raise AttributeError("You cannot add class attributes to %s" % cls)
return set_class_attr
def _swig_add_metaclass(metaclass):
"""Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
def wrapper(cls):
return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
return wrapper
class _SwigNonDynamicMeta(type):
"""Meta class to enforce nondynamic attributes (no new attributes) for a class"""
__setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
class Ecef2Enu(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self, lat, lon, h):
_gProcessing.Ecef2Enu_swiginit(self, _gProcessing.new_Ecef2Enu(lat, lon, h))
def convert(self, x, y, z):
return _gProcessing.Ecef2Enu_convert(self, x, y, z)
__swig_destroy__ = _gProcessing.delete_Ecef2Enu
# Register Ecef2Enu in _gProcessing:
_gProcessing.Ecef2Enu_swigregister(Ecef2Enu)
class Enu2Ecef(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self, lat, lon, h):
_gProcessing.Enu2Ecef_swiginit(self, _gProcessing.new_Enu2Ecef(lat, lon, h))
def convert(self, e, n, u):
return _gProcessing.Enu2Ecef_convert(self, e, n, u)
__swig_destroy__ = _gProcessing.delete_Enu2Ecef
# Register Enu2Ecef in _gProcessing:
_gProcessing.Enu2Ecef_swigregister(Enu2Ecef)
def geodetic2ecef(lat, lon, h):
return _gProcessing.geodetic2ecef(lat, lon, h)
def distance(strike1, dip1, rake1, strike2, dip2, rake2, scaleX=1.0, scaleY=1.0, scaleZ=1.0):
return _gProcessing.distance(strike1, dip1, rake1, strike2, dip2, rake2, scaleX, scaleY, scaleZ)
def rotAngleNP(strike1, dip1, rake1, strike2, dip2, rake2):
return _gProcessing.rotAngleNP(strike1, dip1, rake1, strike2, dip2, rake2)
def rotAngleMT(strike1, dip1, rake1, strike2, dip2, rake2):
return _gProcessing.rotAngleMT(strike1, dip1, rake1, strike2, dip2, rake2)
def otherNodalPlane(inStrike, inDip, inRake):
return _gProcessing.otherNodalPlane(inStrike, inDip, inRake)
def nodalPlane2Tensor(strike, dip, rake):
return _gProcessing.nodalPlane2Tensor(strike, dip, rake)
class Vector3D(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self, *args):
_gProcessing.Vector3D_swiginit(self, _gProcessing.new_Vector3D(*args))
def length(self):
return _gProcessing.Vector3D_length(self)
def dot(self, v):
return _gProcessing.Vector3D_dot(self, v)
def cross(self, a, b):
return _gProcessing.Vector3D_cross(self, a, b)
def normalize(self):
return _gProcessing.Vector3D_normalize(self)
def __imul__(self, scale):
return _gProcessing.Vector3D___imul__(self, scale)
def __mul__(self, *args):
return _gProcessing.Vector3D___mul__(self, *args)
def __iadd__(self, other):
return _gProcessing.Vector3D___iadd__(self, other)
def __isub__(self, other):
return _gProcessing.Vector3D___isub__(self, other)
def __add__(self, other):
return _gProcessing.Vector3D___add__(self, other)
def __sub__(self, other):
return _gProcessing.Vector3D___sub__(self, other)
def fromAngles(self, radAzimuth, radDip):
return _gProcessing.Vector3D_fromAngles(self, radAzimuth, radDip)
def toAngles(self, radAzimuth, radDip):
return _gProcessing.Vector3D_toAngles(self, radAzimuth, radDip)
x = property(_gProcessing.Vector3D_x_get, _gProcessing.Vector3D_x_set)
y = property(_gProcessing.Vector3D_y_get, _gProcessing.Vector3D_y_set)
z = property(_gProcessing.Vector3D_z_get, _gProcessing.Vector3D_z_set)
__swig_destroy__ = _gProcessing.delete_Vector3D
# Register Vector3D in _gProcessing:
_gProcessing.Vector3D_swigregister(Vector3D)
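A minimal usage sketch of the wrapper above (a sketch only: the import path assumes lib/python is on PYTHONPATH, argument units of degrees/meters and the three-component Vector3D constructor overload are assumptions, not documented here):

from gempa import gProcessing

# Build an ECEF->ENU converter anchored at a geodetic origin (lat, lon, h),
# then express an ECEF point in local east/north/up coordinates.
enu = gProcessing.Ecef2Enu(48.1, 11.6, 520.0)
local = enu.convert(4177000.0, 855000.0, 4727000.0)

# Vector3D supports in-place normalization and the usual operators.
v = gProcessing.Vector3D(1.0, 2.0, 3.0)  # hypothetical (x, y, z) overload
v.normalize()
print(v.x, v.y, v.z, v.length())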

Binary file not shown.


lib/python/gempa/_gCAPS.so Normal file

Binary file not shown.

Binary file not shown.


@@ -0,0 +1,104 @@
#!/usr/bin/env python3
############################################################################
# Copyright (C) 2024 by gempa GmbH #
# #
# All Rights Reserved. #
# #
# NOTICE: All information contained herein is, and remains #
# the property of gempa GmbH and its suppliers, if any. The intellectual #
# and technical concepts contained herein are proprietary to gempa GmbH #
# and its suppliers. #
# Dissemination of this information or reproduction of this material #
# is strictly forbidden unless prior written permission is obtained #
# from gempa GmbH. #
############################################################################
import os
from seiscomp import logging
from gempa import CAPS
class JournalItem:
def __init__(self, startTime=None, endTime=None):
self.startTime = startTime
self.endTime = endTime
class Journal:
# -------------------------------------------------------------------------
def __init__(self):
self.items = {}
# -------------------------------------------------------------------------
def get(self, streamID):
return self.items.get(streamID)
# -------------------------------------------------------------------------
def read(self, filename):
try:
f = open(filename, "r", encoding="UTF-8")
except Exception as err:
logging.error(f"Journal: Could not open file: {err}")
return False
try:
lineNo = 0
for line in f:
line = line.strip()
if line.startswith("#"):
continue
try:
stationID, strStartTime, strEndTime = line.split(" ")
except ValueError:
logging.error(
f"Journal: Invalid line format in line {lineNo}"
)
return False
item = JournalItem()
item.startTime = CAPS.Time.FromString(strStartTime, "%FT%T.%Z")
item.endTime = CAPS.Time.FromString(strEndTime, "%FT%T.%Z")
self.items[stationID] = item
lineNo += 1
except IOError as err:
logging.error(f"Journal: Could not read journal from file: {err}")
finally:
f.close()
logging.info("Recovered journal")
for k, v in self.items.items():
logging.info(f" + {k} {v.startTime.iso()} ~ {v.endTime.iso()}")
logging.info("End")
return True
# -------------------------------------------------------------------------
def write(self, filename):
path = os.path.dirname(filename)
if not path:
return False
if not os.path.exists(path):
try:
os.makedirs(path)
except Exception as err:
logging.error(f"Journal: Could not create directory: {err}")
return False
try:
with open(filename, "w", encoding="UTF-8") as f:
for k, v in self.items.items():
f.write(f"{k} {v.startTime.iso()} {v.endTime.iso()}\n")
except Exception as err:
logging.error(f"Journal: Faild to write journal: {err}")
return False
return True
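A usage sketch for the class above (the journal path and stream ID are hypothetical; the on-disk format is one "stationID startTime endTime" triple per line, as parsed in read()):

from gempa import CAPS

journal = Journal()
if journal.read("/tmp/caps-journal.txt"):
    item = journal.get("XX.TEST.")
    if item:
        print(item.startTime.iso(), item.endTime.iso())

# Record a new time window and persist it again, using the same
# time format string the module itself parses with.
start = CAPS.Time.FromString("2024-01-01T00:00:00.000000", "%FT%T.%Z")
end = CAPS.Time.FromString("2024-01-02T00:00:00.000000", "%FT%T.%Z")
journal.items["XX.TEST."] = JournalItem(start, end)
journal.write("/tmp/caps-journal.txt")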

@@ -0,0 +1,127 @@
#!/usr/bin/env python3
############################################################################
# Copyright (C) 2024 by gempa GmbH #
# #
# All Rights Reserved. #
# #
# NOTICE: All information contained herein is, and remains #
# the property of gempa GmbH and its suppliers, if any. The intellectual #
# and technical concepts contained herein are proprietary to gempa GmbH #
# and its suppliers. #
# Dissemination of this information or reproduction of this material #
# is strictly forbidden unless prior written permission is obtained #
# from gempa GmbH. #
############################################################################
import os
from seiscomp import logging
class StreamMapItem:
def __init__(self):
self.networkCode = ""
self.stationCode = ""
self.locationCode = ""
self.stationID = ""
self.baseCode = None
self.folder = None
self.startTime = None
self.endTime = None
class StreamMap:
# -------------------------------------------------------------------------
def __init__(self):
self.items = {}
# -------------------------------------------------------------------------
def get(self, streamID):
return self.items.get(streamID)
# -------------------------------------------------------------------------
def read(self, filename):
try:
f = open(filename, "r", encoding="UTF-8")
except Exception as err:
logging.error(f"Stream map: Could not open file: {err}")
return False
try:
lineNo = -1
for line in f:
lineNo += 1
line = line.strip()
if line.startswith("#"):
continue
if len(line) == 0:
continue
folder = line.strip()
toks = folder.split("_")
tokCount = len(toks)
if tokCount != 3:
logging.error(
f"Stream map: Invalid stream ID in line {lineNo}"
)
continue
item = StreamMapItem()
item.networkCode = toks[0]
item.stationCode = toks[1]
item.locationCode = toks[2]
item.baseCode = str(int(item.networkCode[0:3]))
item.folder = folder
item.stationID = (
item.networkCode
+ "."
+ item.stationCode
+ "."
+ item.locationCode
)
self.items[item.stationID] = item
except IOError as err:
logging.error(
f"Stream map: Could not read stream map from file: {err}"
)
finally:
f.close()
if len(self.items) == 0:
logging.info("No streams configured: Nothing todo")
return False
logging.info("Configured stations")
for k, _v in self.items.items():
logging.info(f" + {k}")
logging.info("End")
return True
# -------------------------------------------------------------------------
def write(self, filename):
path = os.path.dirname(filename)
if not path:
return False
if not os.path.exists(path):
try:
os.makedirs(path)
except Exception as err:
logging.error(f"Stream map: Could not create directory: {err}")
return False
try:
with open(filename, "w", encoding="UTF-8") as f:
for k, v in self.items.items():
f.write(f"{k} {v.startTime.iso()} {v.endTime.iso()}\n")
except Exception as err:
logging.error(f"Stream map: Could not open file: {err}")
return False
return True
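A sketch of the expected input: read() above accepts one NET_STA_LOC folder name per line, with "#" starting a comment. Note that baseCode is derived via int(networkCode[0:3]), so the network code must begin with digits; the entries below are hypothetical:

# Contents of /tmp/streams.txt (hypothetical):
#   001_STA01_00
#   001_STA02_

sm = StreamMap()
if sm.read("/tmp/streams.txt"):
    item = sm.get("001.STA01.00")  # keys are NET.STA.LOC station IDs
    if item:
        print(item.folder, item.baseCode)  # 001_STA01_00 1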

@@ -0,0 +1,36 @@
#!/usr/bin/env python3
############################################################################
# Copyright (C) 2024 by gempa GmbH #
# #
# All Rights Reserved. #
# #
# NOTICE: All information contained herein is, and remains #
# the property of gempa GmbH and its suppliers, if any. The intellectual #
# and technical concepts contained herein are proprietary to gempa GmbH #
# and its suppliers. #
# Dissemination of this information or reproduction of this material #
# is strictly forbidden unless prior written permission is obtained #
# from gempa GmbH. #
############################################################################
import numpy as np
from gempa import CAPS
def calculateAbsPerc(grid, percentile=99.9):
grid_array = np.array(grid)
result = np.percentile(np.abs(grid_array), percentile)
return result
def parseTime(s):
formats = ["%F", "%F %T", "%F %T.%Z", "%FT%T", "%FT%T.%Z"]
for fmt in formats:
time = CAPS.Time.FromString(s, fmt)
if time.valid():
return time
return None
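A short, hypothetical use of both helpers above:

grid = [[1.0, -5.0], [2.0, 4.0]]
# 99th percentile of the absolute values of the flattened grid (~5.0 here)
print(calculateAbsPerc(grid, percentile=99.0))

t = parseTime("2024-06-01 12:00:00")  # matches the "%F %T" format
if t:
    print(t.iso())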

lib/python/nettab/basesc3.py Normal file

@@ -0,0 +1,366 @@
from __future__ import print_function
import seiscomp.datamodel, seiscomp.core, seiscomp.config
from .helpers import parsers
import datetime
import sys
class sc3(object):
def _fillSc3(self, obj, att):
commentNum = 0
for (k, p) in att.items():
try:
if k == 'Comment':
# print('DEBUG: Adding comment', p)
if p.startswith('Grant'):
# 2020: These belong in DOI metadata, not here.
continue
c = seiscomp.datamodel.Comment()
c.setText(p)
c.setId(str(commentNum))
commentNum += 1
obj.add(c)
continue
if k == 'Pid':
# print('DEBUG: Adding Pid as comment', p)
c = seiscomp.datamodel.Comment()
(typ, val) = p.split(':', 1)
s = '{"type":"%s", "value":"%s"}' % (typ.upper(), val)
c.setText(s)
c.setId('FDSNXML:Identifier/' + str(commentNum))
commentNum += 1
obj.add(c)
continue
w = 'set' + k
p = self.sc3Valid['attributes'][k]['validator'](p)
getattr(obj, w)(p)
except Exception as e:
print("[Error] %s = %s (%s)" % (k, p, e),
file=sys.stderr)
@staticmethod
def getBool(val):
if val == "True" or val == 1:
return True
elif val == "False" or val == 0:
return False
else:
raise Exception("Invalid Boolean Value")
@staticmethod
def getString(data):
return data.strip()
@staticmethod
def getRealArray(data):
RA = seiscomp.datamodel.RealArray()
for r in map(float, data):
RA.content().push_back(r)
return RA
@staticmethod
def getComplexArray(data):
CA = seiscomp.datamodel.ComplexArray()
for (r,i) in data:
CA.content().push_back(complex(float(r),float(i)))
return CA
@staticmethod
def getDate(value):
if isinstance(value, datetime.datetime):
return seiscomp.core.Time(*(value.timetuple()[:6]))
elif isinstance(value, str):
value = parsers.parseDate(value)
return seiscomp.core.Time(*(value.timetuple()[:6]))
return value
@staticmethod
def getBlob(value):
b = seiscomp.datamodel.Blob()
b.setContent(value)
return b
@staticmethod
def getStationGroupType(val):
if val == "ARRAY":
return seiscomp.datamodel.ARRAY
elif val == "DEPLOYMENT":
return seiscomp.datamodel.DEPLOYMENT
else:
raise Exception("Invalid station group type")
@staticmethod
def _findValidOnes(mode):
valid = {
'dataloggerCalibration': {
'creator': seiscomp.datamodel.DataloggerCalibration,
'attributes': {
'SerialNumber': { 'validator': sc3.getString },
'Channel': { 'validator': int },
'Start': { 'validator': sc3.getDate },
'End': { 'validator': sc3.getDate },
'Gain': { 'validator': float },
'GainFrequency': { 'validator': float },
'Remark': { 'validator': sc3.getBlob }
}
},
'sensorCalibration': {
'creator': seiscomp.datamodel.SensorCalibration,
'attributes': {
'SerialNumber': { 'validator': sc3.getString },
'Channel': { 'validator': int },
'Start': { 'validator': sc3.getDate },
'End': { 'validator': sc3.getDate },
'Gain': { 'validator': float },
'GainFrequency': { 'validator': float },
'Remark': { 'validator': sc3.getBlob }
}
},
'channel': {
'creator': seiscomp.datamodel.Stream_Create,
'attributes': {
'Code': { 'validator': sc3.getString },
'Start': { 'validator': sc3.getDate },
'End': { 'validator': sc3.getDate },
'Datalogger': { 'validator': sc3.getString },
'DataloggerSerialNumber': { 'validator': sc3.getString },
'DataloggerChannel': { 'validator': int },
'Sensor': { 'validator': sc3.getString },
'SensorSerialNumber': { 'validator': sc3.getString },
'SensorChannel': { 'validator': int },
'ClockSerialNumber': { 'validator': sc3.getString },
'SampleRateNumerator': { 'validator': int },
'SampleRateDenominator': { 'validator': int },
'Depth': { 'validator': float },
'Azimuth': { 'validator': float },
'Dip': { 'validator': float },
'Gain': { 'validator': float },
'GainFrequency': { 'validator': float },
'GainUnit': { 'validator': sc3.getString },
'Format': { 'validator': sc3.getString },
'Flags': { 'validator': sc3.getString },
'Restricted': { 'validator': sc3.getBool },
'Shared': { 'validator': sc3.getBool }
}
},
'location': {
'creator': seiscomp.datamodel.SensorLocation_Create,
'attributes': {
'Code': { 'validator': sc3.getString },
'Start': { 'validator': sc3.getDate },
'End': { 'validator': sc3.getDate },
"Latitude": { 'validator': float },
"Longitude": { 'validator': float },
"Elevation": { 'validator': float }
}
},
'station': {
'creator': seiscomp.datamodel.Station_Create,
'attributes': {
'Code': { 'validator': sc3.getString },
'Start': { 'validator': sc3.getDate },
'End': { 'validator': sc3.getDate },
'Description': { 'validator': sc3.getString },
'Latitude': { 'validator': float },
'Longitude': { 'validator': float },
'Elevation': { 'validator': float },
'Place': { 'validator': sc3.getString },
'Country': { 'validator': sc3.getString },
'Affiliation': { 'validator': sc3.getString },
'Type': { 'validator': sc3.getString },
'ArchiveNetworkCode': { 'validator': sc3.getString },
'Archive': { 'validator': sc3.getString },
'Restricted': { 'validator': sc3.getBool },
'Shared': { 'validator': sc3.getBool },
'Remark': { 'validator': sc3.getBlob }
}
},
'network': {
'creator': seiscomp.datamodel.Network_Create,
'attributes': {
'Code': { 'validator': sc3.getString },
'Start': { 'validator': sc3.getDate },
'End': { 'validator': sc3.getDate },
'Description': { 'validator': sc3.getString },
'Institutions': { 'validator': sc3.getString },
'Region': { 'validator': sc3.getString },
'Type': { 'validator': sc3.getString },
'NetClass': { 'validator': sc3.getString },
'Archive': { 'validator': sc3.getString },
'Comment': { 'validator': sc3.getString },
'Pid': { 'validator': sc3.getBlob },
'Restricted': { 'validator': sc3.getBool },
'Shared': { 'validator': sc3.getBool },
'Remark': { 'validator': sc3.getBlob }
}
},
'stationGroup': {
'creator': seiscomp.datamodel.StationGroup_Create,
'attributes': {
'Code': { 'validator': sc3.getString },
'Start': { 'validator': sc3.getDate },
'End': { 'validator': sc3.getDate },
'Description': { 'validator': sc3.getString },
'Type': { 'validator': sc3.getStationGroupType },
'Latitude': { 'validator': float },
'Longitude': { 'validator': float },
'Elevation': { 'validator': float },
}
},
'stationReference': {
'creator': seiscomp.datamodel.StationReference,
'attributes': {
'StationID': { 'validator': sc3.getString },
}
},
'datalogger': {
'creator': seiscomp.datamodel.Datalogger_Create,
'attributes': {
'Name': { 'validator': sc3.getString },
'Description': { 'validator': sc3.getString },
'DigitizerModel': { 'validator': sc3.getString },
'DigitizerManufacturer': { 'validator': sc3.getString },
'RecorderModel': { 'validator': sc3.getString },
'RecorderManufacturer': { 'validator': sc3.getString },
'ClockModel': { 'validator': sc3.getString },
'ClockManufacturer': { 'validator': sc3.getString },
'ClockType': { 'validator': sc3.getString },
'Gain': { 'validator': float },
'MaxClockDrift': { 'validator': float },
'Remark': { 'validator': sc3.getBlob }
}
},
'decimation': {
'creator': seiscomp.datamodel.Decimation,
'attributes': {
'SampleRateNumerator': { 'validator': int },
'SampleRateDenominator': { 'validator': int },
'AnalogueFilterChain': { 'validator': sc3.getBlob },
'DigitalFilterChain': { 'validator': sc3.getBlob }
}
},
'fir': {
'creator': seiscomp.datamodel.ResponseFIR_Create,
'attributes': {
"Name": { 'validator': sc3.getString },
"Gain": { 'validator': float },
"DecimationFactor": { 'validator': int },
"Delay": { 'validator': float },
"Correction": { 'validator': float },
"NumberOfCoefficients": { 'validator': int },
"Symmetry": { 'validator': sc3.getString },
"Coefficients": { 'validator': sc3.getRealArray },
"Remarks": { 'validator': sc3.getBlob }
}
},
'paz': {
'creator': seiscomp.datamodel.ResponsePAZ_Create,
'attributes': {
'Name': { 'validator': sc3.getString },
'Description': { 'validator': sc3.getString },
'Type': { 'validator': sc3.getString },
'Gain': { 'validator': float },
'GainFrequency': { 'validator': float },
'NormalizationFactor': { 'validator': float },
'NormalizationFrequency': { 'validator': float },
'NumberOfZeros': { 'validator': int },
'NumberOfPoles': { 'validator': int },
'Zeros': { 'validator': sc3.getComplexArray },
'Poles': { 'validator': sc3.getComplexArray },
'Remark': { 'validator': sc3.getBlob }
}
},
'sensor': {
'creator': seiscomp.datamodel.Sensor_Create,
'attributes': {
'Name': { 'validator': sc3.getString },
'Description': { 'validator': sc3.getString },
'Model': { 'validator': sc3.getString },
'Manufacturer': { 'validator': sc3.getString },
'Type': { 'validator': sc3.getString },
'Unit': { 'validator': sc3.getString },
'LowFrequency': { 'validator': float },
'HighFrequency': { 'validator': float },
'Response': { 'validator': sc3.getString },
'Remark': { 'validator': sc3.getBlob }
}
}
}
return(valid.get(mode))
def __init__(self, mode, child=[]):
self.sc3Mode = mode
self.sc3obj = None
self.sc3Valid = sc3._findValidOnes(mode)
self._sc3Childs = child
def _create(self):
if not self.sc3Valid:
raise Exception("Class without a type defined.")
return self.sc3Valid['creator']()
def sc3Att(self):
"""
This is the heart. You should return a dictionary of attributes to be
set on the sc3 object. This dictionary will be used by the _fillSc3
method.
"""
raise Exception("Not Implemented !")
def sc3ValidKey(self, key):
if not self.sc3Valid:
raise Exception("Class without a type defined.")
return (key in self.sc3Valid['attributes'])
def sc3Resolv(self, inventory):
"""
In this method you should be able to resolve all the references in your
self object.
"""
pass
def sc3Derived(self, inventory):
"""
This method should generate and collect all the derived objects
(children in the inventory sense) that should be attached to the self
object. By default this virtual method returns an empty array.
"""
objs = []
for obj in self._sc3Childs:
objs.append(obj.sc3Obj(inventory))
return objs
def sc3ID(self, inventory):
obj = self.sc3Obj(inventory)
return obj.publicID()
def sc3Obj(self, inventory):
if not self.sc3obj:
# Get a new object
obj = self._create()
# try to resolve REFERENCES to PUBLIC ID
self.sc3Resolv(inventory)
# Add the derived objects in
for dobj in self.sc3Derived(inventory):
obj.add(dobj)
# Fill the Attributes in
self._fillSc3(obj, self.sc3Att())
# # Only want to see Networks:
# if (('Code' in self.sc3Att().keys())
# and ('ArchiveNetworkCode' not in self.sc3Att().keys())
# and ('Azimuth' not in self.sc3Att().keys())
# ):
# print('DEBUG basesc3.py: sc3Obj:', self, self.sc3Att())
# Set as created
self.sc3obj = obj
# return the obj
return self.sc3obj
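A minimal sketch of a concrete subclass (hypothetical; the real node classes built on this base live in nodesi.py and nodesnslc.py below):

class MyNetwork(sc3):
    def __init__(self, code, start):
        sc3.__init__(self, 'network')
        self.code = code
        self.start = start
    def sc3Att(self):
        # Keys must appear under 'network' in _findValidOnes, where each
        # value is passed through its registered validator by _fillSc3.
        return {'Code': self.code, 'Start': self.start, 'NetClass': 'p'}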

@@ -0,0 +1,506 @@
from __future__ import print_function
import sys
import csv
import re
from datetime import datetime
def getFieldNames(fd):
tmp = fd.readline().split(',')
fieldNames = []
for i in tmp:
fieldNames.append(i.strip())
return fieldNames
def quote(instr):
return '"'+instr+'"'
def hummanStr(instr):
return instr.replace("_"," ")
def parseDate(val):
if not val or val == "":
return None
date=val.replace("/", "-")
formats={ len("YYYY-JJJ") : "%Y-%j",
len("YYYY-MM-DD") : "%Y-%m-%d",
len("YYYY-JJJ:HHMM") : "%Y-%j:%H%M",
len("YYYY-JJJTHH:MM") : "%Y-%jT%H:%M",
len("YYYY-MM-DDTHH:MM") : "%Y-%m-%dT%H:%M",
len("YYYY-JJJTHH:MM:SS") : "%Y-%jT%H:%M:%S",
len("YYYY-MM-DDTHH:MM:SS") : "%Y-%m-%dT%H:%M:%S"}
try:
return datetime.strptime(date, formats[len(date)])
except Exception as e:
raise ValueError("invalid date: " + date + str(e))
def formatDate(date):
if not date:
return ""
if date.hour != 0 or date.minute != 0:
return datetime.strftime(date,"%Y/%j:%H%M")
return datetime.strftime(date,"%Y/%j")
def isPyVersion(major, minor):
return sys.version_info[0] == major and \
sys.version_info[1] == minor
class StationMappings:
def __init__(self, networkCode, stationList, filename):
self.networkCode = networkCode
self.stationList = stationList
self.stationMapping = {}
self.stationBreak = {}
if not filename: return
_rx_statmap = re.compile(r'\s*([^_]*)_([^=]*)=(\S*)\s*(from=([0-9]+/[0-9]+))?\s*(to=([0-9]+/[0-9]+))?\s*$')
fd = open(filename)
stationMapping = {}
try:
lineno = 0
try:
line = fd.readline()
lineno = 1
while line:
m = _rx_statmap.match(line)
if m is None:
raise Exception("parse error")
(sta, net, archive_net, from_def, from_year, to_def, to_year) = m.groups()
if net != self.networkCode:
line = fd.readline()
continue
if sta not in self.stationList:
line = fd.readline()
continue
try:
sta_net = stationMapping[sta]
except KeyError:
sta_net = []
stationMapping[sta] = sta_net
if from_def:
from_date = parseDate(from_year)
else:
from_date = None
if to_def:
to_date = parseDate(to_year)
else:
to_date = None
sta_net.append((from_date, to_date, archive_net))
line = fd.readline()
lineno += 1
except (Exception, TypeError, ValueError) as e:
raise Exception("%s:%d: %s" % (file, lineno, str(e)))
finally:
fd.close()
if len(stationMapping):
print("Found %d station mappings" % len(stationMapping), file=sys.stderr)
self.stationMapping = stationMapping
else:
## print("No station mappings found", file=sys.stderr)
pass
def dump(self, fdo, stationCode):
items = []
for (code, mapping) in self.stationMapping.items():
if stationCode and stationCode != code: continue
items.append(code)
for (fromDate, toDate, network) in mapping:
fdo.write("Sa: ArchiveNetworkCode=%s %s" % (network, code))
if fromDate:
fdo.write(" from=%s" % formatDate(fromDate))
if toDate:
fdo.write(" to=%s" % formatDate(toDate))
fdo.write("\n")
for code in items:
self.stationMapping.pop(code)
def getMappings(self, code, start, end):
mapping = []
if (code, start, end) not in self.stationBreak:
mapping.append([start, end])
else:
for (archiveNet, s, e, fr, to) in self.stationBreak[(code, start, end)]:
mapping.append([s, e])
return mapping
def parseStationLine(self, items):
stationCode = items[0].strip()
start = parseDate(items[10])
if len(items) > 11:
end = parseDate(items[11])
else:
end = None
if stationCode not in self.stationMapping:
## print("Skipping %s not in mapping list" % stationCode, file=sys.stderr)
return self.getMappings(stationCode, start, end)
for (fDate, tDate, archiveNet) in self.stationMapping[stationCode]:
if fDate and tDate:
raise Exception("Not Supported to and from definitions found.")
elif fDate:
if fDate >= start:
if (end and fDate <= end) or not end:
## print("Processing fDate %s %s %s [%s]" % (stationCode, start, end, fDate), file=sys.stderr)
if (stationCode, start, end) in self.stationBreak:
raise Exception("Crazy multiple station mapping for the same station line")
self.stationBreak[(stationCode, start, end)] = []
self.stationBreak[(stationCode, start, end)].append((self.networkCode, start, fDate, fDate, tDate))
self.stationBreak[(stationCode, start, end)].append((archiveNet, fDate, end, fDate, tDate))
## print(" found mapping From -> %s (%s,%s)" % (fDate, stationCode, formatDate(start)), file=sys.stderr)
return self.getMappings(stationCode, start, end)
elif tDate:
if tDate >= start:
if (end and tDate <= end) or not end:
## print("Processing tDate %s %s %s [%s]" % (stationCode, start, end, tDate), file=sys.stderr)
if (stationCode, start, end) in self.stationBreak:
raise Exception("Crazy multiple station mapping for the same station line")
self.stationBreak[(stationCode, start, end)] = []
self.stationBreak[(stationCode, start, end)].append((archiveNet, start, tDate, fDate, tDate))
self.stationBreak[(stationCode, start, end)].append((self.networkCode, tDate, end, fDate, tDate))
## print(" found mapping To -> %s (%s,%s)" % (tDate, stationCode, formatDate(start)), file=sys.stderr)
return self.getMappings(stationCode, start, end)
else:
if (stationCode, start, end) in self.stationBreak:
raise Exception("Crazy multiple station mapping for the same station line")
self.stationBreak[(stationCode, start, end)] = []
self.stationBreak[(stationCode, start, end)].append((archiveNet, start, end, fDate, tDate))
## print(" found mapping ALL (%s,%s)" % (stationCode, formatDate(start)), file=sys.stderr)
return self.getMappings(stationCode, start, end)
## print("Ignored %s" % " ".join(items), file=sys.stderr)
return self.getMappings(stationCode, start, end)
class StationAttributes:
def __init__(self, networkCode, stationList, filename):
self.networkCode= networkCode
self.stationList = stationList
self.stationAttributeList = {}
if not filename: return
fd = open(filename)
attributes = {}
try:
try:
fieldNames = None
if isPyVersion(2, 3):
fieldNames = getFieldNames(fd)
for row in csv.DictReader(fd, fieldNames):
net_code = row['net_code']
if net_code != self.networkCode: continue
sta_code = row['sta_code']
if sta_code not in self.stationList: continue
start = parseDate(row['start'].strip())
if sta_code in attributes:
raise Exception("multiple %s found in %s" % (str((net_code, sta_code, row['start'])), filename))
del row['net_code']
del row['sta_code']
del row['start']
## Clean up input
for key in ['restricted', 'restricted_exc', 'place', 'country', 'affiliation', 'remark']:
row[key] = row[key].strip()
if len(row[key]) == 0:
del row[key]
if 'restricted' in row:
row['restricted'] = bool(int(row['restricted']))
if not row['restricted']: del (row['restricted'])
if row:
attributes[sta_code] = row
except KeyError as e:
raise Exception("column %s missing in %s" % (str(e), filename))
except (TypeError, ValueError) as e:
raise Exception("error reading %s: %s" % (filename, str(e)))
finally:
fd.close()
self.stationAttributeList = self.__build__(attributes)
print(" loaded attributes for %d stations on network %s (%s)" % (len(self.stationAttributeList), self.networkCode, filename), file=sys.stderr)
def __build__(self, attributes):
newat = {}
if not attributes:
## print("no station attributes found for network %s" % self.networkCode, file=sys.stderr)
return newat
for (code,row) in attributes.items():
nr = {}
for (k,v) in row.items():
if k == 'country': k = 'Country'
if k == 'place': k = 'Place'
if k == 'affiliation': k = 'Affiliation'
if k == 'remark': k = 'Remark'
if k == 'restricted': k = 'Restricted'
nr[k] = v
if nr:
newat[code] = nr
return newat
def get(self, code):
if self.stationAttributeList and code in self.stationAttributeList:
return self.stationAttributeList[code]
else:
return None
def __parseDescription__(self, description):
affiliation = None
place = None
country = None
description = hummanStr(description)
hasStation = True if description.find("Station") >= 0 else False
if hasStation:
affiliation = description[0:(description.index("Station"))].strip()
parts = description[description.index("Station")+7:].strip().split(",")
else:
parts = description.split(",")
if len(parts) > 1:
country = parts[len(parts)-1].strip()
parts = parts[0:(len(parts)-1)]
place = ",".join(parts)
else:
place = ",".join(parts)
# print("Country:", country, file=sys.stderr)
# print("Place:", place, file=sys.stderr)
# print("Affiliation:", affiliation, file=sys.stderr)
oui = {}
if country:
oui['Country'] = country
if place:
oui['Place'] = place
if affiliation:
oui['Affiliation'] = affiliation
return oui
def reorder_station_attr(self):
att = {}
if not self.stationAttributeList:
return None
for (code, row) in self.stationAttributeList.items():
for (k, v) in row.items():
if k == 'restricted_exc':
k = 'Restricted'
extra=',*,'+str(v)
v = (not row['Restricted']) if 'Restricted' in row else True
else:
extra= ''
try:
dk = att[k]
except:
dk = {}
att[k] = dk
try:
dv = dk[str(v)]
except:
dv = []
dk[str(v)] = dv
dv.append(code+extra)
return att
def parseStationLine(self, items, fStart = None, fEnd = None):
stationCode = items[0].strip()
description = items[1]
start = parseDate(items[10])
if stationCode not in self.stationList:
raise Exception("Station %s not in station list." % stationCode)
## Here we can force a different start & End values to the line
if fStart is not None:
start = fStart
if fEnd is not None:
end = fEnd
oui = None
at = self.get(stationCode)
#print >>sys.stderr,items, at, file=sys.stderr)
if not at:
## print(" Deriving attributes from description %s " % " ".join(items), file=sys.stderr)
at = self.__parseDescription__(description)
if at:
self.stationAttributeList[stationCode] = at
else:
for item in ['Affiliation', 'Country', 'Place']:
if item in at:
continue
if not oui:
## print(" Deriving attribute (%s) from description %s " % (item, " ".join(items)), file=sys.stderr)
oui = self.__parseDescription__(description)
if item in oui:
## print(" Setting attribute (%s) from description for %s = %s" % (item, stationCode, oui[item]), file=sys.stderr)
at[item] = oui[item]
else:
## print(" Empty %s for %s" % (item, stationCode), file=sys.stderr)
pass
country = at['Country'] if 'Country' in at else None
place = at['Place'] if 'Place' in at else None
return [place, country]
def dump(self, fdo, code):
if not code:
att = self.reorder_station_attr()
for (key,v) in att.items():
if key in ['Country', 'Place']: continue
for (value, s) in v.items():
fdo.write("Sa: %s=%s" % (key, quote(value)))
for station in s:
fdo.write(" %s" % (station))
fdo.write("\n")
else:
at = self.get(code)
if not at: return
if 'done' in at: return
at['done'] = 1 # Mark the item as printed
for (k,v) in at.items():
extra = ''
if k in [ 'done', 'Place', 'Country']: continue
if k in ['Affiliation']: v = quote(v)
if k == 'Restricted':
extra = ' %s,*,*' % code
if k == 'restricted_exc':
k = 'Restricted'
extra=',*,'+str(v)
v = (not at['Restricted']) if 'Restricted' in at else True
fdo.write("Sa: %s=%s %s%s\n" % (k,v,code,extra))
class NetworkAttributes:
def __build__(self, row):
#net_code,start,end,restricted,shared,net_class,type,institutions,region,remark
attList = {}
if row['start']:
self.start = row['start'].strftime("%Y/%j")
self.startDate = row['start']
self.hasStart = True
if row['end']:
self.end = row['end'].strftime("%Y/%j")
self.endDate = row['end']
self.hasEnd = True
if row['restricted'] != 0:
attList['Restricted'] = row['restricted']
if row['shared'] != 1:
attList['Shared'] = row['shared']
if row['net_class']:
attList['NetClass'] = row['net_class'].strip()
if row['type']:
attList['Type'] = row['type'].strip()
if row['institutions']:
attList['Institutions'] = row['institutions'].strip()
if row['region']:
attList['Region'] = row['region'].strip()
if row['remark']:
attList['Remark'] = row['remark'].strip()
self.networkAttributes.update(attList)
def parseNetworkLine(self, items):
if len(items) < 4 or len(items) > 6:
raise Exception("Invalid network line")
attList = {}
if items[1] == "none":
attList['Description'] = hummanStr(items[0])
else:
attList['Description'] = "%s (%s)" % (hummanStr(items[0]), items[1])
self.networkAttributes.update(attList)
def dump(self, fdo):
for (k,v) in self.networkAttributes.items():
if k in ['Description', 'Remark', 'Region', 'Institutions']:
v = quote(v)
fdo.write("Na: %s=%s\n" % (k,v))
def __init__(self, networkCode, filename):
self.networkCode = networkCode
self.networkAttributes = {}
self.start = None
self.end = None
self.hasStart = False
self.hasEnd = False
if not filename: return
fd = open(filename)
try:
try:
fieldNames = None
if isPyVersion(2, 3):
fieldNames = getFieldNames(fd)
for row in csv.DictReader(fd, fieldNames):
net_code = row['net_code']
if net_code != self.networkCode: continue
#del row['net_code']
#del row['start']
row['start'] = parseDate(row['start'])
row['end'] = parseDate(row['end'])
row['restricted'] = bool(int(row['restricted']))
row['shared'] = bool(int(row['shared']))
row['region'] = row['region'].strip()
row['remark'] = row['remark'].strip()
row['institutions'] = row['institutions'].strip()
self.__build__(row)
break
except KeyError as e:
raise Exception("column %s missing in %s" % (str(e), filename))
except (TypeError, ValueError) as e:
raise Exception("error reading %s: %s" % (filename, str(e)))
finally:
fd.close()
print(" found %d Attribute for network %s (%s)" % (len(self.networkAttributes), self.networkCode, filename), file=sys.stderr)

lib/python/nettab/helpers.py Normal file

@@ -0,0 +1,160 @@
import re
from datetime import datetime
import string
from functools import reduce
class parsers(object):
@staticmethod
def parseString(val):
return val.strip()
@staticmethod
def _parse_paz(npaz, s):
_rx_paz = re.compile(r'\s*([0-9]*)\(\s*([^,]+),\s*([^)]+)\)\s*')
pos = 0
n = 0
c = []
while pos < len(s):
m = _rx_paz.match(s, pos)
if m is None:
raise Exception("error parsing PAZ at '" + s[pos:] + "'")
try:
if len(m.group(1)) > 0:
x = int(m.group(1))
else:
x = 1
rv = m.group(2)
iv = m.group(3)
float(rv)
float(iv)
except ValueError:
raise Exception("error parsing PAZ at '" + s[pos:] + "'")
for i in range(0, x):
c.append((rv, iv))
i = i
n += x
pos = m.end()
if n != npaz:
raise Exception("expected %d PAZ, found %d" % (npaz, n))
return c
@staticmethod
def _normalize(num, denom):
if num > denom:
(a, b) = (num, denom)
else:
(a, b) = (denom, num)
while b > 1:
(a, b) = (b, a % b)
if b == 0:
return (num / a, denom / a)
return (num, denom)
@staticmethod
def _rational(x):
sign, mantissa, exponent = x.as_tuple()
sign = (1, -1)[sign]
mantissa = sign * reduce(lambda a, b: 10 * a + b, mantissa)
if exponent < 0:
return parsers._normalize(mantissa, 10 ** (-exponent))
else:
return (mantissa * 10 ** exponent, 1)
@staticmethod
def _parseFloat(val, mi=None , ma= None):
number = float(val)
if (mi and number < mi) or (ma and number > ma):
raise Exception("Invalid Range")
return number
@staticmethod
def parseGain(val):
try:
return parsers._parseFloat(val, 0.0, None)
except Exception as e:
raise Exception("Invalid Gain: %s" % e)
@staticmethod
def parseLongitude(val):
try:
return parsers._parseFloat(val, -180.0, 180.0)
except Exception as e:
raise Exception("Invalid Longitude: %s" % e)
@staticmethod
def parseLatitude(val):
try:
return parsers._parseFloat(val, -90.0, 90.0)
except Exception as e:
raise Exception("Invalid Latitude: %s" % e)
@staticmethod
def parseDepth(val):
# Deepest mine ~ 5000 m
try:
return parsers._parseFloat(val, 0.0, 5000)
except Exception as e:
raise Exception("Invalid Depth: %s" % e)
@staticmethod
def parseElevation(val):
# Highest Everest ~8500 m
# Deepest Mariana ~11000 m
try:
return parsers._parseFloat(val, -11000, 9000)
except Exception as e:
raise Exception("Invalid Elevation: %s" % e)
@staticmethod
def parseDate(val):
date=val.replace("/", "-")
formats={ len("YYYY-JJJ") : "%Y-%j",
len("YYYY-MM-DD") : "%Y-%m-%d",
len("YYYY-JJJ:HHMM") : "%Y-%j:%H%M",
len("YYYY-JJJTHH:MM") : "%Y-%jT%H:%M",
len("YYYY-MM-DDTHH:MM") : "%Y-%m-%dT%H:%M",
len("YYYY-JJJTHH:MM:SS") : "%Y-%jT%H:%M:%S",
len("YYYY-MM-DDTHH:MM:SS") : "%Y-%m-%dT%H:%M:%S"}
try:
return datetime.strptime(date, formats[len(date)])
except Exception as e:
raise ValueError("invalid date: " + date + str(e))
@staticmethod
def parseLocationCode(val):
Code = val.strip()
if len(Code) > 2 or len(re.sub("[A-Z0-9-*?]","",Code)) > 0:
raise Exception("wrong code for location: %s" % Code)
return Code
@staticmethod
def parseStationCode(val):
Code = val.strip()
if not Code or len(Code) > 5 or len(re.sub("[A-Z0-9*?]","",Code)) > 0:
raise Exception("Wrong code for station: %s" % Code)
return Code
@staticmethod
def parseChannelCode(val):
Code = val.strip()
if not Code or len(Code) > 3 or len(re.sub("[A-Z0-9*?]","",Code)) > 0:
raise Exception("Wrong code for channel: %s" % Code)
return Code
@staticmethod
def parseNetworkCode(val):
Code = val.strip()
if not Code or len(Code) > 2 or len(re.sub("[A-Z0-9*?]","",Code)) > 0:
raise Exception("Wrong code for network: %s" % Code)
return Code
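Direct use of a few of the validators above (a sketch; the import path assumes the nettab package layout shown in this commit):

from nettab.helpers import parsers

print(parsers.parseDate("2024/152"))      # year/julian-day form
print(parsers.parseLatitude("48.13"))     # range-checked float
print(parsers.parseChannelCode("HHZ"))    # <= 3 chars from [A-Z0-9*?]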

File diff suppressed because it is too large.

lib/python/nettab/nettab.py Normal file, 1645 lines

File diff suppressed because it is too large.

lib/python/nettab/nodesi.py Normal file, 523 lines

@@ -0,0 +1,523 @@
from __future__ import print_function
from .lineType import Dl, Se, Ff, Pz, Cl
from .basesc3 import sc3
import sys
class prefixable(object):
def adjust(self, prefix):
if prefix:
self.id = "%s:%s" % (prefix, self.id)
class Instruments(object):
def __init__(self, prefix=""):
self.keys = []
self.ses = {}
self.dls = {}
self.fls = {}
self.cls = {}
self._sensors = {}
self._datalogger = {}
self._filters = {}
self._Cal = {}
self._prefix = prefix
def sc3Objs(self):
objs = []
for s in list(self._sensors.values()):
objs.append(s.sc3Obj(self))
for s in list(self._datalogger.values()):
objs.append(s.sc3Obj(self))
for s in list(self._filters.values()):
objs.append(s.sc3Obj(self))
return objs
def add(self, obj):
where = None
if isinstance(obj, Se):
where = self.ses
elif isinstance(obj, Dl):
where = self.dls
elif isinstance(obj, Cl):
where = self.cls
elif isinstance(obj, Ff) or isinstance(obj, Pz):
where = self.fls
else:
raise Exception("Object type %s doesn't fir this class" % type(obj))
if obj.id in self.keys:
raise Exception("Object id %s already exist." % (obj))
self.keys.append(obj.id)
where[obj.id] = obj
return
def instrumentId(self, iid, gain):
if gain is None:
if iid in self.dls:
gain = self.dls[iid].gain
elif iid in self.ses:
gain = self.ses[iid].gain
else:
raise Exception("Instrument iid not found")
siid = "%s/g=%s" % (iid, int(float(gain)))
return siid
def loadDataloggerCalibrations(self, dsm, dsn, dch, dsg, start, end, dd):
cls = []
for cl in self.cls.values():
if cl.type != "L": continue
if cl.match(dsm, dsn):
cls.append(Calibration(cl, dch, start, end))
if len(cls) == 0:
if dsn in self.cls:
print("[%s] No calibrations found for serial number %s and model %s " % (dsm, dsn, dsm), file=sys.stderr)
return
diid = self.instrumentId(dsm, dsg)
try:
datalogger = self._datalogger[diid].sc3Obj(self)
if dd != datalogger.publicID():
raise Exception("Public Id doesn't match")
except:
raise Exception("[%s] Could not retrieve datalogger %s" % (dsm, diid))
for cl in cls:
if (dsm, dsn, dch, start, end) in self._Cal:
## print >> sys.stderr,"[%s] Skiping calibration channel %s" % (dsm, cl.channel)
continue
## print >> sys.stderr,"[%s] Adding calibration %s (%s)" % (dsm, cl.channel, dd)
datalogger.add(cl.sc3Obj(self))
self._Cal[(dsm, dsn, dch, start, end)] = cl
def loadSensorCalibrations(self, ssm, ssn, sch, ssg, start, end, ss):
cls = []
for cl in self.cls.values():
if cl.type != "S": continue
if cl.match(ssm, ssn):
cls.append(Calibration(cl, sch, start, end))
if len(cls) == 0:
if ssn in self.cls:
print("[%s] No calibrations found for serial number %s and model %s " % (ssm,ssn, ssm), file=sys.stderr)
return
siid = self.instrumentId(ssm, ssg)
try:
sensor = self._sensors[siid].sc3Obj(self)
if ss != sensor.publicID():
raise Exception("Public Id doesn't match")
except:
raise Exception("[%s] Could not retrieve sensor %s" % (ssm, siid))
for cl in cls:
if (ssm, ssn, sch, start, end) in self._Cal:
## print >> sys.stderr,"[%s] Skiping calibration channel %s" % (ssm, cl.channel)
continue
## print >> sys.stderr,"[%s] Adding calibration %s channel %s start %s" % (ssm, ssn, cl.channel, start)
sensor.add(cl.sc3Obj(self))
self._Cal[(ssm, ssn, sch, start, end)] = cl
def check(self, networks):
error = []
# Dataloggers check
error.append("* Dataloggers:")
for dl in self.dls.values():
error.extend(dl.check(self))
error.append("")
# Check fir filters
error.append("* Filters:")
for f in self.fls.values():
c = False
for dl in self.dls.values():
c = c or dl.use(f)
if c: break
if not c: error.append(" [%s] filter is not used" % f.id)
error.append("")
# Check the calibrations
error.append("* Calibrations:")
for cl in self.cls.values():
error.extend(cl.check(self))
error.append("")
error.append("* Sensors:")
for f in self.ses.values():
c = False
for network in networks.values():
for station in network.stations:
for location in station.locations:
for channel in location.channels:
c = c or channel.use(f)
if c: break
if c: break
if c: break
if c: break
if not c: error.append(" [%s] sensor is not used" % f.id)
error.append("")
error.append("* Dataloggers:")
for f in self.dls.values():
c = False
for network in networks.values():
c = c or network.use(f)
if c: break
if not c: error.append(" [%s] datalogger is not used" % f.id)
error.append("")
return error
def filterType(self, iid):
if iid not in self.keys:
raise Exception("[%s] Filter id not found" % iid)
if iid not in self.fls:
raise Exception("[%s] Object is not a filter" % iid)
obj = self.fls[iid]
if isinstance(obj, Ff):
fType = 'D'
elif isinstance(obj, Pz):
fType = obj.type
return fType
def filterID(self, iid):
if iid not in self.keys:
raise Exception("[%s] Filter id not found" % iid)
if iid not in self.fls:
raise Exception("[%s] Object is not a filter" % iid)
if iid not in self._filters:
obj = self.fls[iid]
if isinstance(obj, Pz):
## print >> sys.stderr," Generating new Filter (PZ): %s %s" % (iid,obj.type)
newFilter = Paz(obj)
elif isinstance(obj, Ff):
## print >> sys.stderr," Generating new Filter (Fir): %s" % (iid)
newFilter = Fir(obj)
newFilter.adjust(self._prefix)
if newFilter.id != self.prefix(iid):
raise Exception("Invalid filter created %s" % (iid))
self._filters[iid] = newFilter
return self._filters[iid].sc3ID(self)
def prefix(self, iid):
if self._prefix:
iid = "%s:%s" % (self._prefix, iid)
return iid
def dataloggerID(self, iid, gain = None):
if iid not in self.keys:
raise Exception("Object not found.")
if iid not in self.dls:
raise Exception("[%s] Object is not a datalogger" % iid)
diid = self.instrumentId(iid, gain)
if diid not in self._datalogger:
## print >> sys.stderr,"Generating datalogger %s -> %s" % (iid, diid)
newDatalogger = Dataloger(self.dls[iid], gain)
newDatalogger.adjust(self._prefix)
if newDatalogger.id != self.prefix(diid):
raise Exception("Invalid datalogger created %s %s" % (iid, diid))
self._datalogger[diid] = newDatalogger
return self._datalogger[diid].sc3ID(self)
def sensorID(self, iid, gain = None):
if iid not in self.keys:
raise Exception("Object not found.")
if iid not in self.ses:
raise Exception("[%s] Object is not a sensor" % iid)
diid = self.instrumentId(iid, gain)
if diid not in self._sensors:
## print >> sys.stderr,"Generating Sensor %s -> %s" % (iid, diid)
newSensor = Sensor(self.ses[iid], gain)
newSensor.adjust(self._prefix)
if newSensor.id != self.prefix(diid):
raise Exception("Invalid sensor created %s %s" % (iid, diid))
self._sensors[diid] = newSensor
return self._sensors[diid].sc3ID(self)
def _findObject(self, objID, where):
obj = None
for ob in where.values():
obj = ob.sc3Obj(self)
if obj.publicID() == objID:
break;
if not obj:
raise Exception("Object not found: %s " % objID)
return obj
def _findCallibration(self, obj, count, serialNumber, channel, start):
if serialNumber is None:
return None
if channel is None:
return None
for cal in [obj(i) for i in range(0, count)]:
if cal.serialNumber() == serialNumber and cal.channel() == channel:
return cal.gain()
return None
def _sensorGain(self, seID, serialNumber, channel, start):
sensor = self._findObject(seID, self._sensors)
if not sensor:
raise Exception("Not found %s" % seID)
sensorFilter = self._findObject(sensor.response(), self._filters)
if not sensorFilter:
raise Exception("Not found %s" % seID)
gainFrequency = sensorFilter.gainFrequency()
try:
gainUnit = sensor.unit()
except:
print("[%s] No gain unit supplied" % seID, file=sys.stderr)
gainUnit = None
gain = self._findCallibration(sensor.sensorCalibration, sensor.sensorCalibrationCount(), serialNumber, channel, start)
if gain is not None:
## print >> sys.stderr,'[%s] Using sensor gain from calibration %s' % (serialNumber, gain)
pass
else:
gain = sensorFilter.gain()
return (gain, gainFrequency, gainUnit)
def _dataloggerGain(self, dtID, serialNumber, channel, Numerator, Denominator, start):
datalogger = self._findObject(dtID, self._datalogger)
gain = self._findCallibration(datalogger.dataloggerCalibration, datalogger.dataloggerCalibrationCount(), serialNumber, channel, start)
if gain is not None:
##print >> sys.stderr,'[%s] Using datalogger gain from calibration %s' % (serialNumber, gain)
pass
else:
gain = datalogger.gain()
decimation = None
for i in range(0,datalogger.decimationCount()):
decimation = datalogger.decimation(i)
if decimation.sampleRateNumerator() == Numerator and decimation.sampleRateDenominator() == Denominator:
break
decimation = None
if not decimation:
raise Exception("Decimation not found %s/%s" % (Numerator, Denominator))
af = decimation.analogueFilterChain().content().split()
df = decimation.digitalFilterChain().content().split()
for fiID in af:
g = self._findObject(fiID, self._filters).gain()
#print >> sys.stderr,"Multiplying by %s %s" % (fiID, g)
gain = gain * g
for fiID in df:
g = self._findObject(fiID, self._filters).gain()
#print >> sys.stderr,"Multiplying by %s %s" % (fiID, g)
gain = gain * g
return gain
def getChannelGainAttribute(self, dtID, seID, dtSerialNumber, seSerialNumber, dtChannel, seChannel, Numerator, Denominator, channelStart):
if not dtID or not seID:
raise Exception("Empty instruments ID supplied.")
(sensorGain, sensorFrequency,sensorUnit) = self._sensorGain(seID, seSerialNumber, seChannel, channelStart)
dataloggerGain = self._dataloggerGain(dtID, dtSerialNumber, dtChannel, Numerator, Denominator, channelStart)
att = {}
att['Gain'] = sensorGain * dataloggerGain
if sensorFrequency is not None:
att['GainFrequency'] = sensorFrequency
if sensorUnit is not None:
att['GainUnit'] = sensorUnit
return att
class Paz(sc3, prefixable):
def __init__(self, pz):
sc3.__init__(self, 'paz')
self.id = pz.id
self.att = pz.getAttributes()
def sc3Att(self):
att = {}
att['Name'] = self.id
for (key,value) in self.att.items():
if not self.sc3ValidKey(key) or key in att:
print(" [%s] [%s] Ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key,value), file=sys.stderr)
continue
att[key] = value
return att
class Sensor(sc3, prefixable):
def __init__(self, se, gain = None):
sc3.__init__(self, 'sensor')
self.baseid = se.id
self.att = se.getAttributes()
self.pz = se.generatePz(gain)
self.id = "%s/g=%s" % (self.baseid, int(float(self.pz.gain)))
def sc3Resolv(self, inventory):
try:
self.att['Response'] = inventory.filterID(self.pz.id)
## print >> sys.stderr,"Re-used a sensor pole-zero"
except:
inventory.add(self.pz)
self.att['Response'] = inventory.filterID(self.pz.id)
def sc3Att(self):
att = {}
att['Name'] = self.id
for (key, value) in self.att.items():
if not self.sc3ValidKey(key) or key in att:
print(" [%s] [%s] ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key, value), file=sys.stderr)
continue
att[key] = value
## Forcing needed description on the sensor
if 'Description' not in att:
att['Description'] = self.id
return att
class Fir(sc3, prefixable):
def __init__(self, ff):
sc3.__init__(self, 'fir')
self.id = ff.id
self.gain = ff.gain
self.att = ff.getAttributes()
def sc3Att(self):
att = {}
att['Name'] = self.id
for (key,value) in self.att.items():
if not self.sc3ValidKey(key) or key in att :
print(" [%s] [%s] Ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key,value), file=sys.stderr)
continue
att[key] = value
return att
class Decimation(sc3):
def __init__(self, numerator, decimator, dl):
sc3.__init__(self, 'decimation')
self._numerator = numerator
self._denominator = decimator
self.chains = dl.chains[(numerator, decimator)]
self.att = {}
def sc3Resolv(self, inventory):
sequence = {}
sequence['A'] = []
sequence['D'] = []
for stage in self.chains:
sid = inventory.filterID(stage)
ADtype = inventory.filterType(stage)
sequence[ADtype].append(sid)
self.att['AnalogueFilterChain'] = " ".join(sequence['A'])
self.att['DigitalFilterChain'] = " ".join(sequence['D'])
def sc3Att(self):
att = {}
att['SampleRateNumerator'] = self._numerator
att['SampleRateDenominator'] = self._denominator
att.update(self.att)
return att
class Dataloger(sc3, prefixable):
def __init__(self, dl, gain = None):
dcs = []
sc3.__init__(self, 'datalogger', dcs)
if gain:
self.gain = gain
else:
self.gain = dl.gain
self.att = dl.getAttributes()
self.id = "%s/g=%s" % (dl.id, int(float(self.gain)))
self.maxClockDrift = dl.mcld
if dl.chains:
for (num, dec) in dl.chains:
dcs.append(Decimation(num, dec, dl))
self.dcs = dcs
else:
print("[%s] Datalogger %s has no stages." % (self.id, dl), file=sys.stderr)
def sc3Att(self):
att = {}
att['Name'] = self.id
att['Gain'] = self.gain
att['MaxClockDrift'] = self.maxClockDrift
for (key,value) in self.att.items():
if not self.sc3ValidKey(key) or key in att:
print(" [%s] [%s] ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key, value), file=sys.stderr)
continue
att[key] = value
## Forcing needed description on the sensor
if 'Description' not in att:
att['Description'] = self.id
return att
class Calibration(sc3):
def __init__(self, cl, channel, start, end):
if cl.type == "S":
sc3.__init__(self, "sensorCalibration")
else:
sc3.__init__(self, "dataloggerCalibration")
if channel < 0 or channel >= cl.channelCount:
raise Exception("Invalid channel for calibration [%s]" % channel)
self.start = start
self.end = end
self.channel = channel
self.id = cl.id
self.att = cl.getAttributes(channel)
def sc3Att(self):
att = {}
att['SerialNumber'] = self.id
att['Start'] = self.start
if self.end:
att['End'] = self.end
for (key, value) in self.att.items():
if not self.sc3ValidKey(key) or key in att:
print(" [%s] [%s] Ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key,value), file=sys.stderr)
continue
att[key] = value
return att

lib/python/nettab/nodesnslc.py Normal file

@@ -0,0 +1,489 @@
from __future__ import print_function
from .lineType import Sl, Nw, Sr, Sg
from .nodesi import Instruments
from .basesc3 import sc3
import sys
debug = 0
class DontFit(Exception):
def __init__(self, message):
Exception.__init__(self, message)
class nslc(object):
def __init__(self):
self.start = None
self.end = None
self.code = None
def __overlap__(self, another):
if self.end:
if self.end > another.start:
if not another.end or self.start < another.end:
return True
else:
if not another.end or self.start < another.end:
return True
return False
def _span(self):
return "%s / %s" % (self.start, self.end)
def sc3Att(self):
att = {}
att['Start'] = self.start
if self.end:
att['End'] = self.end
att['Code'] = self.code
for (key,value) in self.att.items():
if not self.sc3ValidKey(key) or key in att:
print("[%s] type %s ignoring attribute %s = %s " % (self.code, self.sc3Mode, key,value), file=sys.stderr)
continue
att[key] = value
return att
def _cmptime(t1, t2):
if t1 is None and t2 is None:
return 0
elif t2 is None or (t1 is not None and t1 < t2):
return -1
elif t1 is None or (t2 is not None and t1 > t2):
return 1
return 0
class StationGroup(nslc,sc3):
def __str__(self):
return "%s" % (self.code)
def __init__(self, sg):
if not isinstance(sg,Sg):
return False
self.stationReferences = []
sc3.__init__(self, 'stationGroup', self.stationReferences)
self.code = sg.code
self.start = sg.start
self.end = sg.end
self.att = sg.getStationGroupAttributes()
self.srdata = []
def __match__(self, sr):
if not isinstance(sr,Sr):
return False
return (_cmptime(sr.start, self.end) <= 0 and _cmptime(sr.end, self.start) >= 0)
def conflict(self, another):
if self.code != another.code:
return False
if self.end:
if self.end <= another.start:
return False
if another.end and another.end <= self.start:
return False
else:
if another.end and another.end <= self.start:
return False
return True
def Sr(self, sr):
self.srdata.append((sr.ncode, sr.scode, sr.start, sr.end))
def sc3Resolv(self, inventory):
for (ncode, scode, start, end) in self.srdata:
try:
for stationID in inventory.resolveStation(ncode, scode, start, end):
st = StationReference(self, stationID)
self.stationReferences.append(st)
except Exception as e:
sys.stderr.write(str(e) + "\n")
class StationReference(sc3):
def __str__(self):
return "%s" % (self.att["StationID"])
def __init__(self, stationGroup, stationID):
self.stationGroup = stationGroup
sc3.__init__(self, 'stationReference')
self.att = { "StationID": stationID }
def sc3Att(self):
return self.att
class Network(nslc, sc3):
def __str__(self):
return "%s" % (self.code)
def __init__(self, nw):
if not isinstance(nw, Nw):
raise TypeError("Network requires an Nw line")
self.stations = []
sc3.__init__(self, 'network', self.stations)
nslc.__init__(self)
self.code = nw.code
self.start = nw.start
self.end = nw.end
self.att = nw.getNetworkAttributes()
def __match__(self, sl):
if not isinstance(sl,Sl):
return False
if sl.start < self.start:
return False
if self.end:
if not sl.end or sl.end > self.end:
return False
return True
def conflict(self, another):
if self.code != another.code:
return False
if self.end:
if self.end <= another.start:
return False
if another.end and another.end <= self.start:
return False
else:
if another.end and another.end <= self.start:
return False
return True
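# An Sl line is folded into the first existing station epoch that accepts it;
# stations raising DontFit are skipped, and a new Station is created only if
# none fits. Same-code stations with overlapping epochs are a hard error.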
def Sl(self, sl):
if not self.__match__(sl):
raise DontFit(" Object doesn't fit this network object.")
inserted = False
for sta in self.stations:
try:
where = "%s" % (sta._span())
sta.Sl(sl)
if debug: print("[%s] inserted at %s -> %s" % (self, where, sta._span()), file=sys.stderr)
inserted = True
for other in self.stations:
if other is sta: continue
if other.conflict(sta):
raise Exception("Station conflict with already existing station (%s/%s/%s)" % (other, other.start, other.end))
break
except DontFit:
pass
if not inserted:
st = Station(self, sl)
if debug: print("[%s] created new station %s %s" % (self, st, st._span()), file=sys.stderr)
for sta in self.stations:
if sta.conflict(st):
raise Exception("Station conflict with already existing station (%s/%s/%s)" % (sta, sta.start, sta.end))
self.stations.append(st)
def check(self, i):
error = []
for station in self.stations:
error.extend(station.check(i))
return error
def use(self, iid):
c = False
for station in self.stations:
c = c or station.use(iid)
if c: break
return c
class Station(nslc, sc3):
def __str__(self):
return "%s.%s" % (self.network.code, self.code)
def __init__(self, network, sl):
if not isinstance(sl, Sl):
raise TypeError("Station requires an Sl line")
self.locations = []
self.network = network
sc3.__init__(self, 'station', self.locations)
# Load the station-level fields from the Sl line
nslc.__init__(self)
self.code = sl.code
self.start = sl.start
self.end = sl.end
self.att = sl.getStationAttributes()
# Further parse to generate my locations
self.Sl(sl)
def __match__(self, obj):
if not isinstance(obj,Sl):
return False
# Check code
if obj.code != self.code:
return False
# Attributes
att = obj.getStationAttributes()
for at in att:
# Make sure that all attributes in Sl-line are here
if at not in self.att:
return False
# And they match
if att[at] != self.att[at]:
return False
# Make sure that there is no other attribute here that is not on Sl-line
for at in self.att:
if at not in att:
return False
return True
def __adjustTime__(self, sl):
if sl.start < self.start:
self.start = sl.start
if not self.end:
return
if sl.end and sl.end < self.end:
return
self.end = sl.end
def conflict(self, another):
if not isinstance(another, Station):
raise Exception("Cannot compare myself with %s" % type(another))
if self.code != another.code:
return False
if not self.__overlap__(another):
return False
return True
def use(self, iid):
c = False
for location in self.locations:
c = c or location.use(iid)
if c: break
return c
def check(self, i):
error = []
for location in self.locations:
error.extend(location.check(i))
return error
def Sl(self, sl):
if not self.__match__(sl):
raise DontFit(" sl doesn't fit this station %s/%s_%s." % (self.code, self.start, self.end))
# Handle Time Adjustments
self.__adjustTime__(sl)
# Handle Locations
inserted = False
for loc in self.locations:
try:
where = loc._span()
loc.Sl(sl)
if debug: print(" [%s] inserted at %s -> %s" % (self, where, loc._span()), file=sys.stderr)
inserted = True
for other in self.locations:
if other is loc: continue
if other.conflict(loc):
raise Exception("Location conflict with already existing location")
break
except DontFit:
pass
if not inserted:
loc = Location(self, sl)
if debug: print(" [%s] created new location %s %s" % (self, loc, loc._span()), file=sys.stderr)
for lc in self.locations:
if lc.conflict(loc):
raise Exception("Location conflict with already existing location")
self.locations.append(loc)
def sc3Att(self):
att = nslc.sc3Att(self)
## Make sure that we set the Remark
if 'ArchiveNetworkCode' not in att:
att['ArchiveNetworkCode'] = self.network.code
if 'Remark' not in att:
att['Remark'] = ""
return att
class Location(nslc, sc3):
def __str__(self):
return "%s.%s.%s" % (self.station.network.code, self.station.code, self.code)
def __init__(self, station, sl):
if not isinstance(sl, Sl):
raise TypeError("Location requires an Sl line")
self.channels = []
sc3.__init__(self, 'location', self.channels)
nslc.__init__(self)
self.station = station
self.code = sl.location
self.start = sl.start
self.end = sl.end
self.att = sl.getLocationAttributes()
self.Sl(sl)
def __adjustTime__(self, sl):
if sl.start < self.start:
self.start = sl.start
if not self.end:
return
if sl.end and sl.end < self.end:
return
self.end = sl.end
def __match__(self, obj):
if not isinstance(obj, Sl):
return False
if obj.location != self.code:
return False
# Attributes
att = obj.getLocationAttributes()
for at in att:
# Make sure that all attributes in Sl-line are here
if at not in self.att:
return False
# And they match
if att[at] != self.att[at]:
return False
# Make sure that there is no other attribute here that is not on Sl-line
for at in self.att:
if at not in att:
return False
return True
def conflict(self, another):
if not isinstance(another, Location):
raise Exception("Cannot compare myself with %s" % type(another))
if self.code != another.code:
return False
if not self.__overlap__(another):
return False
return True
def use(self, iid):
c = False
for channel in self.channels:
c = c or channel.use(iid)
if c: break
return c
def check(self, i):
error = []
for channel in self.channels:
error.extend(channel.check(i))
return error
def Sl(self, sl):
if not self.__match__(sl):
raise DontFit(" This obj doesn't match this Location '%s'" % self.code)
# Handle Time Adjustments
self.__adjustTime__(sl)
# Create Channels
for code in sl.channels:
channel = Channel(self, code, sl)
if debug: print(" [%s] created new channel %s/%s" % (self, channel, channel._span()), file=sys.stderr)
for echan in self.channels:
if echan.conflict(channel):
raise Exception("[%s] channel %s conflict with already existing channel" % (self, code))
self.channels.append(channel)
class Channel(nslc, sc3):
def __str__(self):
return "%s.%s.%s.%s" % (self.location.station.network.code, self.location.station.code, self.location.code, self.code)
def __init__(self, location, code, sl):
sc3.__init__(self, 'channel')
self.location = location
nslc.__init__(self)
self.code = code
self.start = sl.start
self.end = sl.end
self.att = sl.getChannelAttributes(self.code)
## Bring the Instrument gains to the channel level
self._sensorGain = sl.sensorGain
self._dataloggerGain = sl.dataloggerGain
def conflict(self, another):
if not isinstance(another, Channel):
raise Exception("Cannot compare myself with %s" % type(another))
if self.code != another.code:
return False
if not self.__overlap__(another):
return False
return True
def use(self, iid):
if 'Datalogger' in self.att and iid == self.att['Datalogger']: return True
if 'Sensor' in self.att and iid == self.att['Sensor']: return True
return False
def check(self, i):
good = []
if not isinstance(i, Instruments):
raise Exception("Invalid instrument object")
if self.att.get('Datalogger') not in i.keys:
good.append("no Datalogger")
if self.att.get('Sensor') not in i.keys:
good.append("no Sensor")
if good:
good = [ " [%s] %s" % (self, "/".join(good)) ]
return good
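# sc3Resolv replaces the symbolic Sensor/Datalogger names (plus gain) by the
# publicIDs generated from the Instruments table and loads the serial-number
# calibrations that match this channel's epoch.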
def sc3Resolv(self, inventory):
if not inventory:
print("[%s] Warning, inventory not supplied" % self.code, file=sys.stderr)
return
try:
ssm = self.att['Sensor']
ssg = self._sensorGain
sch = self.att['SensorChannel']
ssn = self.att["SensorSerialNumber"] if "SensorSerialNumber" in self.att else None
# Sensor publicID
ss = inventory.sensorID(ssm, ssg)
self.att['Sensor'] = ss
# Sensor Calibration
inventory.loadSensorCalibrations(ssm, ssn, sch, ssg, self.start, self.end, ss)
except Exception as e:
print("[%s] Sensor Resolution Error %s" % (self, e), file=sys.stderr)
ss = None
try:
dsm = self.att['Datalogger']
dsg = self._dataloggerGain
dch = self.att['DataloggerChannel']
dsn = self.att['DataloggerSerialNumber'] if 'DataloggerSerialNumber' in self.att else None
dt = inventory.dataloggerID(dsm, dsg)
self.att['Datalogger'] = dt
inventory.loadDataloggerCalibrations(dsm, dsn, dch, dsg, self.start, self.end, dt)
except Exception as e:
print("[%s] Datalogger Resolution Error %s" % (self, e), file=sys.stderr)
dt = None
try:
up = self.att['SampleRateNumerator']
down = self.att['SampleRateDenominator']
self.att.update(inventory.getChannelGainAttribute(dt, ss, dsn, ssn, dch, sch, up, down, self.start))
except Exception as e:
print("[%s] Cannot find gain back for the channel: %s" % (self,e), file=sys.stderr)

View File

@ -0,0 +1,65 @@
import time, datetime
def _cmptime(t1, t2):
if t1 is None and t2 is None:
return 0
elif t2 is None or (t1 is not None and t1 < t2):
return -1
elif t1 is None or (t2 is not None and t1 > t2):
return 1
return 0
def _time2datetime(t):
result = datetime.datetime(*time.strptime(t.toString("%Y-%m-%dT%H:%M:00Z"), "%Y-%m-%dT%H:%M:%SZ")[0:6])
result += datetime.timedelta(microseconds=float(t.toString("%S.%f")) * 1000000)
return result
class StationResolver(object):
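# Collects station epochs from already existing SC3 inventories so that
# station reference (Sr) lines can later be resolved to station publicIDs.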
def __init__(self):
self.stationMap = {}
self.initialStations = set()
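# Index every station epoch by (network code, station code). With
# initial=True the stations are marked as pre-existing, so a later
# re-collection replaces their entries instead of appending duplicates.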
def collectStations(self, inventory, initial = False):
for ni in range(inventory.networkCount()):
n = inventory.network(ni)
for si in range(n.stationCount()):
s = n.station(si)
try:
if initial:
self.initialStations.add((n.code(), s.code()))
else:
self.initialStations.remove((n.code(), s.code()))
del self.stationMap[(n.code(), s.code())]
except KeyError:
pass
try:
item = self.stationMap[(n.code(), s.code())]
except KeyError:
item = []
self.stationMap[(n.code(), s.code())] = item
start = _time2datetime(s.start())
try: end = _time2datetime(s.end())
except Exception: end = None
item.append((start, end, s.publicID()))
def resolveStation(self, ncode, scode, start, end):
result = set()
try:
for (s, e, publicID) in self.stationMap[(ncode, scode)]:
if _cmptime(start, e) <= 0 and _cmptime(end, s) >= 0:
result.add(publicID)
except KeyError:
pass
if not result:
raise Exception("Station reference %s,%s cannot be resolved" % (ncode, scode))
return result

364
lib/python/nettab/tab.py Normal file
View File

@ -0,0 +1,364 @@
from __future__ import print_function
from .lineType import Nw, Sg, Sr, Sl, Sa, Na, Dl, Se, Ff, Pz, Ia, Cl
from .nodesi import Instruments
from .nodesnslc import Network, StationGroup, DontFit
import seiscomp.datamodel, seiscomp.io, seiscomp.client, seiscomp.system
from .stationResolver import StationResolver
import sys
import os
import glob
import re
__VERSION__ = "0.1"
class Tab(object):
def version(self):
return __VERSION__
def __init__(self, instrumentPrefix = None, defaultsFile = None, filterFolder = None, xmlFolder = None, database = None):
self.i = Instruments(instrumentPrefix)
self.n = {}
self.g = {}
self.sas = []
self.nas = []
self.ias = []
self.stationResolver = StationResolver()
self._filterFolder = None
print("Starting tab2inv version %s" % self.version(), file=sys.stderr)
if not filterFolder:
print(" Warning, no filter folder supplied.", file=sys.stderr)
else:
if not os.path.isdir(filterFolder):
raise Exception("Filter folder does not exist.")
self._filterFolder = filterFolder
if defaultsFile is not None:
self._defaults(defaultsFile)
if database is not None:
self._loadDatabase(database)
if xmlFolder is not None:
self._loadXml(xmlFolder)
def _defaults(self, filename):
sas = []
ias = []
nas = []
try:
fd = open(filename)
print(" Parsing defaults file: %s" % (filename), file=sys.stderr)
for line in fd:
line = line.strip()
if not line or line[0] == "#": continue
(Type, Content) = line.split(":",1)
if Type == "Nw":
raise Exception("Defaults file can only contain attributes")
elif Type == "Na":
nas.append(Na(Content))
elif Type == "Sa":
sas.append(Sa(Content))
elif Type == "Sl":
raise Exception("Defaults file can only contain attributes")
elif Type == "Ia":
ias.append(Ia(Content))
elif Type == "Se":
raise Exception("Defaults file can only contain attributes")
elif Type == "Dl":
raise Exception("Defaults file can only contain attributes")
elif Type == "Cl":
raise Exception("Defaults file can only contain attributes")
elif Type == "Ff":
raise Exception("Defaults file can only contain attributes")
elif Type == "If":
raise Exception("Defaults file can only contain attributes")
elif Type == "Pz":
raise Exception("Defaults file can only contain attributes")
else:
print(" Ignored line", line, file=sys.stderr)
fd.close()
except Exception as e:
print(" Warning: %s" % e, file=sys.stderr)
pass
self.sas = sas
self.nas = nas
self.ias = ias
def _loadDatabase(self, dbUrl):
m = re.match(r"(?P<dbDriverName>^.*)://(?P<dbAddress>.+?:.+?@.+?/.+$)", dbUrl)
if not m:
raise Exception("Error parsing SC3 database URL")
db = m.groupdict()
try:
registry = seiscomp.system.PluginRegistry.Instance()
registry.addPluginName("dbmysql")
registry.loadPlugins()
except Exception:
raise  # cannot load the database driver plugin
dbDriver = seiscomp.io.DatabaseInterface.Create(db["dbDriverName"])
if dbDriver is None:
raise Exception("Cannot find database driver " + db["dbDriverName"])
if not dbDriver.connect(db["dbAddress"]):
raise Exception("Cannot connect to database at " + db["dbAddress"])
dbQuery = seiscomp.datamodel.DatabaseQuery(dbDriver)
if dbQuery is None:
raise Exception("Cannot get DB query object")
print(" Loading inventory from database ... ", end=' ', file=sys.stderr)
inventory = seiscomp.datamodel.Inventory()
dbQuery.loadNetworks(inventory)
for ni in range(inventory.networkCount()):
dbQuery.loadStations(inventory.network(ni))
print("Done.", file=sys.stderr)
if inventory:
self.stationResolver.collectStations(inventory, True)
def _loadXml(self, folder):
print(" Loading inventory from XML file ... ", end=' ', file=sys.stderr)
for f in glob.glob(os.path.join(folder, "*.xml")):
ar = seiscomp.io.XMLArchive()
ar.open(f)
inventory = seiscomp.datamodel.Inventory_Cast(ar.readObject())
ar.close()
if inventory:
self.stationResolver.collectStations(inventory)
print("Done.", file=sys.stderr)
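# digest parses one tab file: at most one Nw (network) or Sg (station group)
# header, followed by Na/Sa attribute lines, Sl station or Sr reference
# lines, and instrument lines (Se/Dl/Cl/Ff/If/Pz) that are collected into
# the shared Instruments table self.i.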
def digest(self, tabFilename):
sas = []
ias = []
nw = None
sg = None
n = None
g = None
print(" Parsing file: %s" % (tabFilename), file=sys.stderr)
if not tabFilename or not os.path.isfile(tabFilename):
raise Exception("Supplied filename is invalid.")
if tabFilename in self.n or tabFilename in self.g:
raise Exception("File %s is already digested." % tabFilename)
fd = None
try:
fd = open(tabFilename)
for line in fd:
obj = None
line = line.strip()
if not line or line[0] == "#": continue
if ":" not in line:
raise Exception("Invalid line format '%s'" % line)
(Type, Content) = line.split(":",1)
if Type == "Nw":
if n or g:
raise Exception("Network or Station Group already defined, only one Hr line should be defined per file.")
try:
nw = Nw(Content)
except Exception as e:
raise Exception("Error while creating nw from '%s': %s" % (Content, e))
try:
for na in self.nas: nw.Na(na) # Defaults
except Exception as e:
raise Exception("Error while loading (defaults) %s into %s: %s" % (na, nw, e))
elif Type == "Sg":
if n or g:
raise Exception("Network or Station Group already defined, only one Hr line should be defined per file.")
try:
sg = Sg(Content)
except Exception as e:
raise Exception("Error while creating sg from '%s': %s" % (Content, e))
try:
for na in self.nas: sg.Na(na) # Defaults
except Exception as e:
raise Exception("Error while loading (defaults) %s into %s: %s" % (na, sg, e))
elif Type == "Na":
if not nw and not sg:
raise Exception("No network or station group defined; Na lines must come after a Nw or Sg line.")
if n or g:
raise Exception("No Na lines after a Sl line. Network has already been defined.")
try:
na = Na(Content)
except Exception as e:
raise Exception("Error while creating na from '%s': %s" % (Content, e))
if nw:
try:
nw.Na(na)
except Exception as e:
raise Exception("Error while adding %s to %s: %s" % (na, nw, e))
else:
try:
sg.Na(na)
except Exception as e:
raise Exception("Error while adding %s to %s: %s" % (na, sg, e))
elif Type == "Sa":
if not nw:
raise Exception("No Sa lines allowed before a Hr line.")
try:
sas.append(Sa(Content))
except Exception as e:
raise Exception("Error while creating Sa from '%s': %s" % (Content,e))
elif Type == "Sl":
if not n:
if not nw:
raise Exception("No network defined, Hr line should come before station line.")
else:
n = Network(nw)
for (filename, network) in self.n.items():
if network.conflict(n):
raise Exception("Network already defined %s (%s)-(%s) by file %s." % (network.code, network.start, network.end, filename))
try:
sl = Sl(Content)
except Exception as e:
raise Exception("Error while creating sl from '%s': %s" % (Content, e))
# Fill in attributes
try:
for sa in self.sas: sl.Sa(sa) # Defaults
except Exception as e:
raise Exception("Error while loading (default) %s into %s: %s" % (sa, sl, e))
try:
for sa in sas: sl.Sa(sa) # Collected
except Exception as e:
raise Exception("Error while loading %s into %s: %s" % (str(sa), str(sl), e))
# Digest by Station
try:
n.Sl(sl)
except DontFit:
raise Exception("%s does not fit in %s" % (sl, n))
except Exception as e:
raise Exception("Error while loading %s into %s: %s" % (sl, n, e))
elif Type == "Sr":
if not g:
if not sg:
raise Exception("No station group defined, Sg line should come before station reference line.")
else:
g = StationGroup(sg)
for (filename, stationGroup) in self.g.items():
if stationGroup.conflict(g):
raise Exception("Station group already defined %s (%s)-(%s) by file %s." % (stationGroup.code, stationGroup.start, stationGroup.end, filename))
for (filename, network) in self.n.items():
if network.conflict(g):
raise Exception("Station group conflict network already defined %s (%s)-(%s) by file %s." % (network.code, network.start, network.end, filename))
try:
sr = Sr(Content)
except Exception as e:
raise Exception("Error while creating sr from '%s': %s" % (Content, e))
# Digest by Station Reference
try:
g.Sr(sr)
except DontFit:
raise Exception("%s does not fit in %s" % (sr, g))
except Exception as e:
raise Exception("Error while loading %s into %s: %s" % (sr, g, e))
elif Type == "Ia":
ias.append(Ia(Content))
elif Type == "Se":
obj = Se(Content)
elif Type == "Dl":
obj = Dl(Content)
elif Type == "Cl":
obj = Cl(Content)
elif Type == "Ff":
obj = Ff(self._filterFolder, Content)
elif Type == "If":
obj = Pz(Content,'D')
elif Type == "Pz":
obj = Pz(Content,'A')
else:
print(" Ignored line", line, file=sys.stderr)
## Process Instrument
if obj:
try:
for ia in self.ias: obj.Ia(ia) # Defaults
except Exception as e:
raise Exception("Error while loading (defaults) %s into %s: %s" % (ia, obj, e))
try:
for ia in ias: obj.Ia(ia) # Collected
except Exception as e:
raise Exception("Error while loading %s into %s: %s" % (ia, obj, e))
try:
self.i.add(obj)
except Exception as e:
raise Exception("Error while loading %s into Instruments db: %s" % (obj, e))
obj = None
# Process Network
if n:
self.n[tabFilename] = n
# Process Station Group
if g:
self.g[tabFilename] = g
except Exception as e:
raise e
finally:
if fd:
fd.close()
def check(self):
# Instrument alone check
if self.i.keys:
print("\nChecking Instruments Loaded:\n", file=sys.stderr)
error = self.i.check(self.n)
if error:
for e in error: print(e, file=sys.stderr)
else:
print("\nNo instruments loaded", file=sys.stderr)
# Cross Check
error = []
if self.n:
print("\nChecking Networks Loaded:\n", file=sys.stderr)
for network in self.n.values():
error.extend(network.check(self.i))
if error:
for e in error: print(e, file=sys.stderr)
else:
print("\nNo network/stations loaded.", file=sys.stderr)
def sc3Obj(self, sc3i = None):
if not sc3i:
sc3i = seiscomp.datamodel.Inventory()
for network in list(self.n.values()):
sc3n = network.sc3Obj(self.i)
sc3i.add(sc3n)
for sc3o in self.i.sc3Objs():
sc3i.add(sc3o)
self.stationResolver.collectStations(sc3i)
for stationGroup in list(self.g.values()):
sc3g = stationGroup.sc3Obj(self.stationResolver)
sc3i.add(sc3g)
return sc3i
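# A minimal usage sketch of the digest/check/sc3Obj cycle, mirroring what the
# tab2inv front end does (file names below are hypothetical):
#
#   from nettab.tab import Tab
#   import seiscomp.io
#
#   t = Tab(filterFolder="filters")
#   t.digest("inst.db")        # instrument definitions (Se/Dl/Ff lines)
#   t.digest("qq.tab")         # one network per tab file
#   t.check()                  # cross-check channels against instruments
#   ar = seiscomp.io.XMLArchive()
#   ar.create("inventory.xml")
#   ar.setFormattedOutput(True)
#   ar.writeObject(t.sc3Obj()) # build and serialize the SC3 inventory
#   ar.close()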

View File

@ -0,0 +1,31 @@
0 1.219929e-16 0.000000e+00
1 3.161921e-10 0.000000e+00
2 -4.314652e-08 0.000000e+00
3 -5.635558e-07 0.000000e+00
4 -1.267008e-04 0.000000e+00
5 3.658144e-03 0.000000e+00
6 1.675314e-04 0.000000e+00
7 -5.404505e-03 0.000000e+00
8 1.278609e-02 0.000000e+00
9 -1.803566e-02 0.000000e+00
10 1.473116e-02 0.000000e+00
11 3.226941e-03 0.000000e+00
12 -3.859694e-02 0.000000e+00
13 8.883527e-02 0.000000e+00
14 -1.482427e-01 0.000000e+00
15 2.177661e-01 0.000000e+00
16 8.099144e-01 0.000000e+00
17 1.245959e-01 0.000000e+00
18 -1.230407e-01 0.000000e+00
19 8.899753e-02 0.000000e+00
20 -4.850157e-02 0.000000e+00
21 1.425912e-02 0.000000e+00
22 6.896391e-03 0.000000e+00
23 -1.444342e-02 0.000000e+00
24 1.242861e-02 0.000000e+00
25 -6.568726e-03 0.000000e+00
26 1.522040e-03 0.000000e+00
27 3.142093e-03 0.000000e+00
28 3.656274e-05 0.000000e+00
29 -2.152995e-06 0.000000e+00
30 -2.597827e-07 0.000000e+00

View File

@ -0,0 +1,65 @@
0 1.315493e-11 0.000000e+00
1 1.501065e-04 0.000000e+00
2 1.339681e-02 0.000000e+00
3 1.644292e-01 0.000000e+00
4 5.688094e-01 0.000000e+00
5 5.173835e-01 0.000000e+00
6 -2.608360e-01 0.000000e+00
7 -1.220329e-01 0.000000e+00
8 2.571813e-01 0.000000e+00
9 -2.029026e-01 0.000000e+00
10 7.075881e-02 0.000000e+00
11 3.879666e-02 0.000000e+00
12 -1.143135e-01 0.000000e+00
13 1.354797e-01 0.000000e+00
14 -1.114475e-01 0.000000e+00
15 6.705481e-02 0.000000e+00
16 -1.927124e-02 0.000000e+00
17 -2.093129e-02 0.000000e+00
18 4.768056e-02 0.000000e+00
19 -5.933829e-02 0.000000e+00
20 5.757931e-02 0.000000e+00
21 -4.623331e-02 0.000000e+00
22 2.977715e-02 0.000000e+00
23 -1.248294e-02 0.000000e+00
24 -2.366075e-03 0.000000e+00
25 1.278821e-02 0.000000e+00
26 -1.846982e-02 0.000000e+00
27 1.879725e-02 0.000000e+00
28 -1.713865e-02 0.000000e+00
29 1.278199e-02 0.000000e+00
30 -7.675787e-03 0.000000e+00
31 3.255159e-03 0.000000e+00
32 -8.947563e-05 0.000000e+00
33 -1.778758e-03 0.000000e+00
34 2.596043e-03 0.000000e+00
35 -2.666169e-03 0.000000e+00
36 2.307403e-03 0.000000e+00
37 -1.770516e-03 0.000000e+00
38 1.218643e-03 0.000000e+00
39 -7.460492e-04 0.000000e+00
40 3.921752e-04 0.000000e+00
41 -1.583665e-04 0.000000e+00
42 2.437801e-05 0.000000e+00
43 3.807573e-05 0.000000e+00
44 -5.618048e-05 0.000000e+00
45 5.152771e-05 0.000000e+00
46 -3.856469e-05 0.000000e+00
47 2.530286e-05 0.000000e+00
48 -1.512465e-05 0.000000e+00
49 8.739795e-06 0.000000e+00
50 -4.648117e-06 0.000000e+00
51 1.376276e-06 0.000000e+00
52 7.042064e-07 0.000000e+00
53 2.241873e-07 0.000000e+00
54 -1.251026e-06 0.000000e+00
55 1.066771e-07 0.000000e+00
56 2.642876e-07 0.000000e+00
57 3.226638e-07 0.000000e+00
58 -8.074162e-08 0.000000e+00
59 -1.099048e-07 0.000000e+00
60 -3.325203e-08 0.000000e+00
61 1.388506e-08 0.000000e+00
62 1.056275e-08 0.000000e+00
63 2.577911e-09 0.000000e+00
64 -7.018623e-10 0.000000e+00

View File

@ -0,0 +1,67 @@
0 -3.653417e-17 0.000000e+00
1 3.674881e-08 0.000000e+00
2 -4.270596e-07 0.000000e+00
3 1.145020e-06 0.000000e+00
4 -1.875941e-07 0.000000e+00
5 -3.372737e-07 0.000000e+00
6 2.787469e-06 0.000000e+00
7 -3.744026e-06 0.000000e+00
8 5.411719e-06 0.000000e+00
9 7.473363e-06 0.000000e+00
10 -5.177595e-04 0.000000e+00
11 2.106768e-04 0.000000e+00
12 4.632577e-05 0.000000e+00
13 -6.082222e-04 0.000000e+00
14 1.441747e-03 0.000000e+00
15 -2.406265e-03 0.000000e+00
16 3.225338e-03 0.000000e+00
17 -3.506390e-03 0.000000e+00
18 2.814411e-03 0.000000e+00
19 -7.719714e-04 0.000000e+00
20 -2.805119e-03 0.000000e+00
21 7.778055e-03 0.000000e+00
22 -1.358146e-02 0.000000e+00
23 1.917646e-02 0.000000e+00
24 -2.297035e-02 0.000000e+00
25 2.403979e-02 0.000000e+00
26 -2.209865e-02 0.000000e+00
27 8.607339e-03 0.000000e+00
28 1.175252e-02 0.000000e+00
29 -4.477868e-02 0.000000e+00
30 9.649231e-02 0.000000e+00
31 -1.917548e-01 0.000000e+00
32 5.276523e-01 0.000000e+00
33 7.241670e-01 0.000000e+00
34 -1.569053e-01 0.000000e+00
35 4.425742e-02 0.000000e+00
36 3.141684e-03 0.000000e+00
37 -2.667144e-02 0.000000e+00
38 3.615316e-02 0.000000e+00
39 -3.856867e-02 0.000000e+00
40 3.108417e-02 0.000000e+00
41 -2.352589e-02 0.000000e+00
42 1.532109e-02 0.000000e+00
43 -7.403983e-03 0.000000e+00
44 1.096454e-03 0.000000e+00
45 3.097965e-03 0.000000e+00
46 -5.193199e-03 0.000000e+00
47 5.561311e-03 0.000000e+00
48 -4.761101e-03 0.000000e+00
49 3.382132e-03 0.000000e+00
50 -1.920520e-03 0.000000e+00
51 7.152175e-04 0.000000e+00
52 7.677194e-05 0.000000e+00
53 -4.518973e-04 0.000000e+00
54 5.026997e-04 0.000000e+00
55 -5.650370e-04 0.000000e+00
56 -5.568005e-05 0.000000e+00
57 1.577356e-05 0.000000e+00
58 -1.419847e-06 0.000000e+00
59 8.149094e-07 0.000000e+00
60 6.807946e-07 0.000000e+00
61 -1.252728e-06 0.000000e+00
62 1.524350e-06 0.000000e+00
63 -2.833359e-07 0.000000e+00
64 -1.063838e-08 0.000000e+00
65 1.257120e-09 0.000000e+00
66 -5.429542e-11 0.000000e+00

View File

@ -0,0 +1,39 @@
0 4.189518e-13 0.000000e+00
1 3.303176e-04 0.000000e+00
2 1.029213e-03 0.000000e+00
3 -3.141228e-03 0.000000e+00
4 2.057093e-04 0.000000e+00
5 1.525213e-03 0.000000e+00
6 -6.231927e-03 0.000000e+00
7 1.048013e-02 0.000000e+00
8 -1.312025e-02 0.000000e+00
9 1.078214e-02 0.000000e+00
10 -1.444550e-03 0.000000e+00
11 -1.587295e-02 0.000000e+00
12 3.950740e-02 0.000000e+00
13 -6.510363e-02 0.000000e+00
14 8.537156e-02 0.000000e+00
15 -8.919134e-02 0.000000e+00
16 5.006189e-02 0.000000e+00
17 8.372328e-01 0.000000e+00
18 2.667231e-01 0.000000e+00
19 -1.666931e-01 0.000000e+00
20 9.528399e-02 0.000000e+00
21 -5.092177e-02 0.000000e+00
22 1.614584e-02 0.000000e+00
23 7.063624e-03 0.000000e+00
24 -1.838771e-02 0.000000e+00
25 1.994141e-02 0.000000e+00
26 -1.548951e-02 0.000000e+00
27 8.527354e-03 0.000000e+00
28 -2.557887e-03 0.000000e+00
29 -1.811026e-03 0.000000e+00
30 2.426493e-03 0.000000e+00
31 -3.757695e-03 0.000000e+00
32 4.672927e-04 0.000000e+00
33 6.330721e-04 0.000000e+00
34 -1.568741e-06 0.000000e+00
35 -1.254798e-05 0.000000e+00
36 3.210405e-07 0.000000e+00
37 -2.633241e-08 0.000000e+00
38 -5.099975e-08 0.000000e+00

View File

@ -0,0 +1,81 @@
0 6.915055e-16 0.000000e+00
1 9.981469e-07 0.000000e+00
2 8.986285e-05 0.000000e+00
3 3.536859e-04 0.000000e+00
4 -3.196747e-04 0.000000e+00
5 2.398310e-04 0.000000e+00
6 4.343304e-05 0.000000e+00
7 -6.140379e-04 0.000000e+00
8 1.450240e-03 0.000000e+00
9 -2.414179e-03 0.000000e+00
10 3.243791e-03 0.000000e+00
11 -3.565280e-03 0.000000e+00
12 2.956281e-03 0.000000e+00
13 -1.048729e-03 0.000000e+00
14 -2.353488e-03 0.000000e+00
15 7.146584e-03 0.000000e+00
16 -1.283558e-02 0.000000e+00
17 1.849560e-02 0.000000e+00
18 -2.280356e-02 0.000000e+00
19 2.414348e-02 0.000000e+00
20 -2.075420e-02 0.000000e+00
21 1.085375e-02 0.000000e+00
22 7.376841e-03 0.000000e+00
23 -3.628054e-02 0.000000e+00
24 8.073029e-02 0.000000e+00
25 -1.563791e-01 0.000000e+00
26 5.966318e-01 0.000000e+00
27 6.616155e-01 0.000000e+00
28 -1.985033e-01 0.000000e+00
29 5.962802e-02 0.000000e+00
30 -1.201563e-02 0.000000e+00
31 -2.031269e-02 0.000000e+00
32 3.489734e-02 0.000000e+00
33 -3.783039e-02 0.000000e+00
34 3.414802e-02 0.000000e+00
35 -2.681871e-02 0.000000e+00
36 1.805448e-02 0.000000e+00
37 -9.684112e-03 0.000000e+00
38 1.924548e-03 0.000000e+00
39 2.270220e-03 0.000000e+00
40 -4.929948e-03 0.000000e+00
41 5.783542e-03 0.000000e+00
42 -5.278113e-03 0.000000e+00
43 4.012361e-03 0.000000e+00
44 -2.512171e-03 0.000000e+00
45 1.166119e-03 0.000000e+00
46 -1.915292e-04 0.000000e+00
47 -3.549948e-04 0.000000e+00
48 5.355819e-04 0.000000e+00
49 -4.810171e-04 0.000000e+00
50 4.186318e-04 0.000000e+00
51 7.809605e-05 0.000000e+00
52 -5.470072e-06 0.000000e+00
53 -2.123757e-06 0.000000e+00
54 -6.620526e-07 0.000000e+00
55 7.238966e-07 0.000000e+00
56 1.013226e-06 0.000000e+00
57 -1.929203e-06 0.000000e+00
58 7.801228e-07 0.000000e+00
59 -7.887565e-07 0.000000e+00
60 5.818626e-07 0.000000e+00
61 3.221050e-08 0.000000e+00
62 -1.076378e-07 0.000000e+00
63 1.999555e-08 0.000000e+00
64 -7.052141e-08 0.000000e+00
65 -1.357645e-08 0.000000e+00
66 -3.311185e-08 0.000000e+00
67 1.552117e-08 0.000000e+00
68 -5.395556e-09 0.000000e+00
69 7.791274e-09 0.000000e+00
70 2.075919e-10 0.000000e+00
71 -9.326780e-10 0.000000e+00
72 1.850689e-09 0.000000e+00
73 -1.973863e-09 0.000000e+00
74 1.334281e-09 0.000000e+00
75 -6.315467e-10 0.000000e+00
76 6.994718e-11 0.000000e+00
77 1.148694e-10 0.000000e+00
78 -5.595614e-11 0.000000e+00
79 5.760568e-12 0.000000e+00
80 -5.489862e-12 0.000000e+00

View File

@ -0,0 +1,400 @@
0 -1.280410E-09 0.000000E+00
1 9.089140E-09 0.000000E+00
2 2.857200E-08 0.000000E+00
3 7.068940E-08 0.000000E+00
4 1.503850E-07 0.000000E+00
5 2.898420E-07 0.000000E+00
6 5.199920E-07 0.000000E+00
7 8.824160E-07 0.000000E+00
8 1.431250E-06 0.000000E+00
9 2.234920E-06 0.000000E+00
10 3.377490E-06 0.000000E+00
11 4.959500E-06 0.000000E+00
12 7.097790E-06 0.000000E+00
13 9.924440E-06 0.000000E+00
14 1.358420E-05 0.000000E+00
15 1.823040E-05 0.000000E+00
16 2.401920E-05 0.000000E+00
17 3.110180E-05 0.000000E+00
18 3.961540E-05 0.000000E+00
19 4.967160E-05 0.000000E+00
20 6.134480E-05 0.000000E+00
21 7.465790E-05 0.000000E+00
22 8.956970E-05 0.000000E+00
23 1.059620E-04 0.000000E+00
24 1.236260E-04 0.000000E+00
25 1.422580E-04 0.000000E+00
26 1.614470E-04 0.000000E+00
27 1.806800E-04 0.000000E+00
28 1.993440E-04 0.000000E+00
29 2.167350E-04 0.000000E+00
30 2.320800E-04 0.000000E+00
31 2.445590E-04 0.000000E+00
32 2.533370E-04 0.000000E+00
33 2.576020E-04 0.000000E+00
34 2.566110E-04 0.000000E+00
35 2.497330E-04 0.000000E+00
36 2.364990E-04 0.000000E+00
37 2.166500E-04 0.000000E+00
38 1.901760E-04 0.000000E+00
39 1.573550E-04 0.000000E+00
40 1.187790E-04 0.000000E+00
41 7.536150E-05 0.000000E+00
42 2.833800E-05 0.000000E+00
43 -2.075750E-05 0.000000E+00
44 -7.013260E-05 0.000000E+00
45 -1.177970E-04 0.000000E+00
46 -1.616380E-04 0.000000E+00
47 -1.995190E-04 0.000000E+00
48 -2.293810E-04 0.000000E+00
49 -2.493630E-04 0.000000E+00
50 -2.579120E-04 0.000000E+00
51 -2.539050E-04 0.000000E+00
52 -2.367430E-04 0.000000E+00
53 -2.064400E-04 0.000000E+00
54 -1.636770E-04 0.000000E+00
55 -1.098340E-04 0.000000E+00
56 -4.697750E-05 0.000000E+00
57 2.218660E-05 0.000000E+00
58 9.440430E-05 0.000000E+00
59 1.660030E-04 0.000000E+00
60 2.330560E-04 0.000000E+00
61 2.915810E-04 0.000000E+00
62 3.377580E-04 0.000000E+00
63 3.681570E-04 0.000000E+00
64 3.799620E-04 0.000000E+00
65 3.711900E-04 0.000000E+00
66 3.408650E-04 0.000000E+00
67 2.891620E-04 0.000000E+00
68 2.174900E-04 0.000000E+00
69 1.285060E-04 0.000000E+00
70 2.606830E-05 0.000000E+00
71 -8.490010E-05 0.000000E+00
72 -1.986100E-04 0.000000E+00
73 -3.086790E-04 0.000000E+00
74 -4.084630E-04 0.000000E+00
75 -4.914240E-04 0.000000E+00
76 -5.515290E-04 0.000000E+00
77 -5.836450E-04 0.000000E+00
78 -5.839130E-04 0.000000E+00
79 -5.500750E-04 0.000000E+00
80 -4.817300E-04 0.000000E+00
81 -3.804970E-04 0.000000E+00
82 -2.500650E-04 0.000000E+00
83 -9.613190E-05 0.000000E+00
84 7.379770E-05 0.000000E+00
85 2.507300E-04 0.000000E+00
86 4.246150E-04 0.000000E+00
87 5.848830E-04 0.000000E+00
88 7.210410E-04 0.000000E+00
89 8.233180E-04 0.000000E+00
90 8.833110E-04 0.000000E+00
91 8.945860E-04 0.000000E+00
92 8.532140E-04 0.000000E+00
93 7.581840E-04 0.000000E+00
94 6.116610E-04 0.000000E+00
95 4.190820E-04 0.000000E+00
96 1.890410E-04 0.000000E+00
97 -6.701870E-05 0.000000E+00
98 -3.353110E-04 0.000000E+00
99 -6.003940E-04 0.000000E+00
100 -8.460070E-04 0.000000E+00
101 -1.056010E-03 0.000000E+00
102 -1.215390E-03 0.000000E+00
103 -1.311250E-03 0.000000E+00
104 -1.333740E-03 0.000000E+00
105 -1.276860E-03 0.000000E+00
106 -1.139110E-03 0.000000E+00
107 -9.238090E-04 0.000000E+00
108 -6.392740E-04 0.000000E+00
109 -2.985730E-04 0.000000E+00
110 8.095210E-05 0.000000E+00
111 4.784920E-04 0.000000E+00
112 8.708350E-04 0.000000E+00
113 1.233650E-03 0.000000E+00
114 1.542910E-03 0.000000E+00
115 1.776410E-03 0.000000E+00
116 1.915250E-03 0.000000E+00
117 1.945200E-03 0.000000E+00
118 1.857870E-03 0.000000E+00
119 1.651590E-03 0.000000E+00
120 1.331930E-03 0.000000E+00
121 9.117790E-04 0.000000E+00
122 4.110140E-04 0.000000E+00
123 -1.443240E-04 0.000000E+00
124 -7.232630E-04 0.000000E+00
125 -1.291520E-03 0.000000E+00
126 -1.813440E-03 0.000000E+00
127 -2.254090E-03 0.000000E+00
128 -2.581490E-03 0.000000E+00
129 -2.768760E-03 0.000000E+00
130 -2.796120E-03 0.000000E+00
131 -2.652470E-03 0.000000E+00
132 -2.336640E-03 0.000000E+00
133 -1.858050E-03 0.000000E+00
134 -1.236750E-03 0.000000E+00
135 -5.027860E-04 0.000000E+00
136 3.050470E-04 0.000000E+00
137 1.141090E-03 0.000000E+00
138 1.955230E-03 0.000000E+00
139 2.695760E-03 0.000000E+00
140 3.312460E-03 0.000000E+00
141 3.759760E-03 0.000000E+00
142 3.999910E-03 0.000000E+00
143 4.005660E-03 0.000000E+00
144 3.762670E-03 0.000000E+00
145 3.271090E-03 0.000000E+00
146 2.546440E-03 0.000000E+00
147 1.619580E-03 0.000000E+00
148 5.357070E-04 0.000000E+00
149 -6.475150E-04 0.000000E+00
150 -1.862780E-03 0.000000E+00
151 -3.036670E-03 0.000000E+00
152 -4.093770E-03 0.000000E+00
153 -4.961150E-03 0.000000E+00
154 -5.573010E-03 0.000000E+00
155 -5.875080E-03 0.000000E+00
156 -5.828670E-03 0.000000E+00
157 -5.414010E-03 0.000000E+00
158 -4.632620E-03 0.000000E+00
159 -3.508570E-03 0.000000E+00
160 -2.088510E-03 0.000000E+00
161 -4.402630E-04 0.000000E+00
162 1.349800E-03 0.000000E+00
163 3.180770E-03 0.000000E+00
164 4.942220E-03 0.000000E+00
165 6.520130E-03 0.000000E+00
166 7.803440E-03 0.000000E+00
167 8.690760E-03 0.000000E+00
168 9.097010E-03 0.000000E+00
169 8.959570E-03 0.000000E+00
170 8.243470E-03 0.000000E+00
171 6.945480E-03 0.000000E+00
172 5.096570E-03 0.000000E+00
173 2.762750E-03 0.000000E+00
174 4.398920E-05 0.000000E+00
175 -2.928690E-03 0.000000E+00
176 -5.998030E-03 0.000000E+00
177 -8.986910E-03 0.000000E+00
178 -1.170620E-02 0.000000E+00
179 -1.396360E-02 0.000000E+00
180 -1.557300E-02 0.000000E+00
181 -1.636440E-02 0.000000E+00
182 -1.619300E-02 0.000000E+00
183 -1.494760E-02 0.000000E+00
184 -1.255800E-02 0.000000E+00
185 -9.000540E-03 0.000000E+00
186 -4.301130E-03 0.000000E+00
187 1.463060E-03 0.000000E+00
188 8.165080E-03 0.000000E+00
189 1.563180E-02 0.000000E+00
190 2.364960E-02 0.000000E+00
191 3.197290E-02 0.000000E+00
192 4.033310E-02 0.000000E+00
193 4.845020E-02 0.000000E+00
194 5.604420E-02 0.000000E+00
195 6.284710E-02 0.000000E+00
196 6.861480E-02 0.000000E+00
197 7.313740E-02 0.000000E+00
198 7.624880E-02 0.000000E+00
199 7.783390E-02 0.000000E+00
200 7.783390E-02 0.000000E+00
201 7.624880E-02 0.000000E+00
202 7.313740E-02 0.000000E+00
203 6.861480E-02 0.000000E+00
204 6.284710E-02 0.000000E+00
205 5.604420E-02 0.000000E+00
206 4.845020E-02 0.000000E+00
207 4.033310E-02 0.000000E+00
208 3.197290E-02 0.000000E+00
209 2.364960E-02 0.000000E+00
210 1.563180E-02 0.000000E+00
211 8.165080E-03 0.000000E+00
212 1.463060E-03 0.000000E+00
213 -4.301130E-03 0.000000E+00
214 -9.000540E-03 0.000000E+00
215 -1.255800E-02 0.000000E+00
216 -1.494760E-02 0.000000E+00
217 -1.619300E-02 0.000000E+00
218 -1.636440E-02 0.000000E+00
219 -1.557300E-02 0.000000E+00
220 -1.396360E-02 0.000000E+00
221 -1.170620E-02 0.000000E+00
222 -8.986910E-03 0.000000E+00
223 -5.998030E-03 0.000000E+00
224 -2.928690E-03 0.000000E+00
225 4.398920E-05 0.000000E+00
226 2.762750E-03 0.000000E+00
227 5.096570E-03 0.000000E+00
228 6.945480E-03 0.000000E+00
229 8.243470E-03 0.000000E+00
230 8.959570E-03 0.000000E+00
231 9.097010E-03 0.000000E+00
232 8.690760E-03 0.000000E+00
233 7.803440E-03 0.000000E+00
234 6.520130E-03 0.000000E+00
235 4.942220E-03 0.000000E+00
236 3.180770E-03 0.000000E+00
237 1.349800E-03 0.000000E+00
238 -4.402630E-04 0.000000E+00
239 -2.088510E-03 0.000000E+00
240 -3.508570E-03 0.000000E+00
241 -4.632620E-03 0.000000E+00
242 -5.414010E-03 0.000000E+00
243 -5.828670E-03 0.000000E+00
244 -5.875080E-03 0.000000E+00
245 -5.573010E-03 0.000000E+00
246 -4.961150E-03 0.000000E+00
247 -4.093770E-03 0.000000E+00
248 -3.036670E-03 0.000000E+00
249 -1.862780E-03 0.000000E+00
250 -6.475150E-04 0.000000E+00
251 5.357070E-04 0.000000E+00
252 1.619580E-03 0.000000E+00
253 2.546440E-03 0.000000E+00
254 3.271090E-03 0.000000E+00
255 3.762670E-03 0.000000E+00
256 4.005660E-03 0.000000E+00
257 3.999910E-03 0.000000E+00
258 3.759760E-03 0.000000E+00
259 3.312460E-03 0.000000E+00
260 2.695760E-03 0.000000E+00
261 1.955230E-03 0.000000E+00
262 1.141090E-03 0.000000E+00
263 3.050470E-04 0.000000E+00
264 -5.027860E-04 0.000000E+00
265 -1.236750E-03 0.000000E+00
266 -1.858050E-03 0.000000E+00
267 -2.336640E-03 0.000000E+00
268 -2.652470E-03 0.000000E+00
269 -2.796120E-03 0.000000E+00
270 -2.768760E-03 0.000000E+00
271 -2.581490E-03 0.000000E+00
272 -2.254090E-03 0.000000E+00
273 -1.813440E-03 0.000000E+00
274 -1.291520E-03 0.000000E+00
275 -7.232630E-04 0.000000E+00
276 -1.443240E-04 0.000000E+00
277 4.110140E-04 0.000000E+00
278 9.117790E-04 0.000000E+00
279 1.331930E-03 0.000000E+00
280 1.651590E-03 0.000000E+00
281 1.857870E-03 0.000000E+00
282 1.945200E-03 0.000000E+00
283 1.915250E-03 0.000000E+00
284 1.776410E-03 0.000000E+00
285 1.542910E-03 0.000000E+00
286 1.233650E-03 0.000000E+00
287 8.708350E-04 0.000000E+00
288 4.784920E-04 0.000000E+00
289 8.095210E-05 0.000000E+00
290 -2.985730E-04 0.000000E+00
291 -6.392740E-04 0.000000E+00
292 -9.238090E-04 0.000000E+00
293 -1.139110E-03 0.000000E+00
294 -1.276860E-03 0.000000E+00
295 -1.333740E-03 0.000000E+00
296 -1.311250E-03 0.000000E+00
297 -1.215390E-03 0.000000E+00
298 -1.056010E-03 0.000000E+00
299 -8.460070E-04 0.000000E+00
300 -6.003940E-04 0.000000E+00
301 -3.353110E-04 0.000000E+00
302 -6.701870E-05 0.000000E+00
303 1.890410E-04 0.000000E+00
304 4.190820E-04 0.000000E+00
305 6.116610E-04 0.000000E+00
306 7.581840E-04 0.000000E+00
307 8.532140E-04 0.000000E+00
308 8.945860E-04 0.000000E+00
309 8.833110E-04 0.000000E+00
310 8.233180E-04 0.000000E+00
311 7.210410E-04 0.000000E+00
312 5.848830E-04 0.000000E+00
313 4.246150E-04 0.000000E+00
314 2.507300E-04 0.000000E+00
315 7.379770E-05 0.000000E+00
316 -9.613190E-05 0.000000E+00
317 -2.500650E-04 0.000000E+00
318 -3.804970E-04 0.000000E+00
319 -4.817300E-04 0.000000E+00
320 -5.500750E-04 0.000000E+00
321 -5.839130E-04 0.000000E+00
322 -5.836450E-04 0.000000E+00
323 -5.515290E-04 0.000000E+00
324 -4.914240E-04 0.000000E+00
325 -4.084630E-04 0.000000E+00
326 -3.086790E-04 0.000000E+00
327 -1.986100E-04 0.000000E+00
328 -8.490010E-05 0.000000E+00
329 2.606830E-05 0.000000E+00
330 1.285060E-04 0.000000E+00
331 2.174900E-04 0.000000E+00
332 2.891620E-04 0.000000E+00
333 3.408650E-04 0.000000E+00
334 3.711900E-04 0.000000E+00
335 3.799620E-04 0.000000E+00
336 3.681570E-04 0.000000E+00
337 3.377580E-04 0.000000E+00
338 2.915810E-04 0.000000E+00
339 2.330560E-04 0.000000E+00
340 1.660030E-04 0.000000E+00
341 9.440430E-05 0.000000E+00
342 2.218660E-05 0.000000E+00
343 -4.697750E-05 0.000000E+00
344 -1.098340E-04 0.000000E+00
345 -1.636770E-04 0.000000E+00
346 -2.064400E-04 0.000000E+00
347 -2.367430E-04 0.000000E+00
348 -2.539050E-04 0.000000E+00
349 -2.579120E-04 0.000000E+00
350 -2.493630E-04 0.000000E+00
351 -2.293810E-04 0.000000E+00
352 -1.995190E-04 0.000000E+00
353 -1.616380E-04 0.000000E+00
354 -1.177970E-04 0.000000E+00
355 -7.013260E-05 0.000000E+00
356 -2.075750E-05 0.000000E+00
357 2.833800E-05 0.000000E+00
358 7.536150E-05 0.000000E+00
359 1.187790E-04 0.000000E+00
360 1.573550E-04 0.000000E+00
361 1.901760E-04 0.000000E+00
362 2.166500E-04 0.000000E+00
363 2.364990E-04 0.000000E+00
364 2.497330E-04 0.000000E+00
365 2.566110E-04 0.000000E+00
366 2.576020E-04 0.000000E+00
367 2.533370E-04 0.000000E+00
368 2.445590E-04 0.000000E+00
369 2.320800E-04 0.000000E+00
370 2.167350E-04 0.000000E+00
371 1.993440E-04 0.000000E+00
372 1.806800E-04 0.000000E+00
373 1.614470E-04 0.000000E+00
374 1.422580E-04 0.000000E+00
375 1.236260E-04 0.000000E+00
376 1.059620E-04 0.000000E+00
377 8.956970E-05 0.000000E+00
378 7.465790E-05 0.000000E+00
379 6.134480E-05 0.000000E+00
380 4.967160E-05 0.000000E+00
381 3.961540E-05 0.000000E+00
382 3.110180E-05 0.000000E+00
383 2.401920E-05 0.000000E+00
384 1.823040E-05 0.000000E+00
385 1.358420E-05 0.000000E+00
386 9.924440E-06 0.000000E+00
387 7.097790E-06 0.000000E+00
388 4.959500E-06 0.000000E+00
389 3.377490E-06 0.000000E+00
390 2.234920E-06 0.000000E+00
391 1.431250E-06 0.000000E+00
392 8.824160E-07 0.000000E+00
393 5.199920E-07 0.000000E+00
394 2.898420E-07 0.000000E+00
395 1.503850E-07 0.000000E+00
396 7.068940E-08 0.000000E+00
397 2.857200E-08 0.000000E+00
398 9.089140E-09 0.000000E+00
399 -1.280410E-09 0.000000E+00

View File

@ -0,0 +1,96 @@
0 -4.624365e-06 0.000000e+00
1 -8.258298e-05 0.000000e+00
2 -2.260141e-04 0.000000e+00
3 -2.539009e-04 0.000000e+00
4 7.665667e-07 0.000000e+00
5 3.050186e-04 0.000000e+00
6 1.712792e-04 0.000000e+00
7 -3.494469e-04 0.000000e+00
8 -4.491013e-04 0.000000e+00
9 2.631577e-04 0.000000e+00
10 7.897725e-04 0.000000e+00
11 3.857301e-05 0.000000e+00
12 -1.091783e-03 0.000000e+00
13 -5.999956e-04 0.000000e+00
14 1.206435e-03 0.000000e+00
15 1.397154e-03 0.000000e+00
16 -9.624677e-04 0.000000e+00
17 -2.313273e-03 0.000000e+00
18 2.078273e-04 0.000000e+00
19 3.130074e-03 0.000000e+00
20 1.137016e-03 0.000000e+00
21 -3.543348e-03 0.000000e+00
22 -3.024242e-03 0.000000e+00
23 3.207636e-03 0.000000e+00
24 5.238007e-03 0.000000e+00
25 -1.803839e-03 0.000000e+00
26 -7.375909e-03 0.000000e+00
27 -8.729728e-04 0.000000e+00
28 8.870910e-03 0.000000e+00
29 4.831847e-03 0.000000e+00
30 -9.042305e-03 0.000000e+00
31 -9.813905e-03 0.000000e+00
32 7.179136e-03 0.000000e+00
33 1.525300e-02 0.000000e+00
34 -2.628732e-03 0.000000e+00
35 -2.026759e-02 0.000000e+00
36 -5.142914e-03 0.000000e+00
37 2.366362e-02 0.000000e+00
38 1.657857e-02 0.000000e+00
39 -2.387548e-02 0.000000e+00
40 -3.227953e-02 0.000000e+00
41 1.860678e-02 0.000000e+00
42 5.394208e-02 0.000000e+00
43 -3.140518e-03 0.000000e+00
44 -8.849621e-02 0.000000e+00
45 -4.014856e-02 0.000000e+00
46 1.847636e-01 0.000000e+00
47 4.066011e-01 0.000000e+00
48 4.066011e-01 0.000000e+00
49 1.847636e-01 0.000000e+00
50 -4.014856e-02 0.000000e+00
51 -8.849621e-02 0.000000e+00
52 -3.140518e-03 0.000000e+00
53 5.394208e-02 0.000000e+00
54 1.860678e-02 0.000000e+00
55 -3.227953e-02 0.000000e+00
56 -2.387548e-02 0.000000e+00
57 1.657857e-02 0.000000e+00
58 2.366362e-02 0.000000e+00
59 -5.142914e-03 0.000000e+00
60 -2.026759e-02 0.000000e+00
61 -2.628732e-03 0.000000e+00
62 1.525300e-02 0.000000e+00
63 7.179136e-03 0.000000e+00
64 -9.813905e-03 0.000000e+00
65 -9.042305e-03 0.000000e+00
66 4.831847e-03 0.000000e+00
67 8.870910e-03 0.000000e+00
68 -8.729728e-04 0.000000e+00
69 -7.375909e-03 0.000000e+00
70 -1.803839e-03 0.000000e+00
71 5.238007e-03 0.000000e+00
72 3.207636e-03 0.000000e+00
73 -3.024242e-03 0.000000e+00
74 -3.543348e-03 0.000000e+00
75 1.137016e-03 0.000000e+00
76 3.130074e-03 0.000000e+00
77 2.078273e-04 0.000000e+00
78 -2.313273e-03 0.000000e+00
79 -9.624677e-04 0.000000e+00
80 1.397154e-03 0.000000e+00
81 1.206435e-03 0.000000e+00
82 -5.999956e-04 0.000000e+00
83 -1.091783e-03 0.000000e+00
84 3.857301e-05 0.000000e+00
85 7.897725e-04 0.000000e+00
86 2.631577e-04 0.000000e+00
87 -4.491013e-04 0.000000e+00
88 -3.494469e-04 0.000000e+00
89 1.712792e-04 0.000000e+00
90 3.050186e-04 0.000000e+00
91 7.665667e-07 0.000000e+00
92 -2.539009e-04 0.000000e+00
93 -2.260141e-04 0.000000e+00
94 -8.258298e-05 0.000000e+00
95 -4.624365e-06 0.000000e+00

View File

@ -0,0 +1,160 @@
0 4.032461e-05 0.000000e+00
1 7.453280e-05 0.000000e+00
2 1.234553e-04 0.000000e+00
3 1.701887e-04 0.000000e+00
4 1.973105e-04 0.000000e+00
5 1.854891e-04 0.000000e+00
6 1.193456e-04 0.000000e+00
7 -5.723101e-06 0.000000e+00
8 -1.779232e-04 0.000000e+00
9 -3.673259e-04 0.000000e+00
10 -5.295104e-04 0.000000e+00
11 -6.150085e-04 0.000000e+00
12 -5.832354e-04 0.000000e+00
13 -4.172837e-04 0.000000e+00
14 -1.349516e-04 0.000000e+00
15 2.083330e-04 0.000000e+00
16 5.277090e-04 0.000000e+00
17 7.281899e-04 0.000000e+00
18 7.312587e-04 0.000000e+00
19 5.019202e-04 0.000000e+00
20 6.783176e-05 0.000000e+00
21 -4.771493e-04 0.000000e+00
22 -9.891580e-04 0.000000e+00
23 -1.308918e-03 0.000000e+00
24 -1.307358e-03 0.000000e+00
25 -9.300168e-04 0.000000e+00
26 -2.262541e-04 0.000000e+00
27 6.483476e-04 0.000000e+00
28 1.461708e-03 0.000000e+00
29 1.963222e-03 0.000000e+00
30 1.956625e-03 0.000000e+00
31 1.367725e-03 0.000000e+00
32 2.854628e-04 0.000000e+00
33 -1.040387e-03 0.000000e+00
34 -2.250679e-03 0.000000e+00
35 -2.969069e-03 0.000000e+00
36 -2.912737e-03 0.000000e+00
37 -1.990583e-03 0.000000e+00
38 -3.573537e-04 0.000000e+00
39 1.598840e-03 0.000000e+00
40 3.340972e-03 0.000000e+00
41 4.323764e-03 0.000000e+00
42 4.155636e-03 0.000000e+00
43 2.736002e-03 0.000000e+00
44 3.234310e-04 0.000000e+00
45 -2.494752e-03 0.000000e+00
46 -4.934943e-03 0.000000e+00
47 -6.225197e-03 0.000000e+00
48 -5.836136e-03 0.000000e+00
49 -3.668966e-03 0.000000e+00
50 -1.394092e-04 0.000000e+00
51 3.880228e-03 0.000000e+00
52 7.261232e-03 0.000000e+00
53 8.919356e-03 0.000000e+00
54 8.140252e-03 0.000000e+00
55 4.837050e-03 0.000000e+00
56 -3.434785e-04 0.000000e+00
57 -6.115665e-03 0.000000e+00
58 -1.084778e-02 0.000000e+00
59 -1.299272e-02 0.000000e+00
60 -1.154995e-02 0.000000e+00
61 -6.430376e-03 0.000000e+00
62 1.391199e-03 0.000000e+00
63 1.000571e-02 0.000000e+00
64 1.698057e-02 0.000000e+00
65 1.997340e-02 0.000000e+00
66 1.740665e-02 0.000000e+00
67 9.029463e-03 0.000000e+00
68 -3.794969e-03 0.000000e+00
69 -1.818304e-02 0.000000e+00
70 -3.022295e-02 0.000000e+00
71 -3.578333e-02 0.000000e+00
72 -3.146898e-02 0.000000e+00
73 -1.550444e-02 0.000000e+00
74 1.167237e-02 0.000000e+00
75 4.726833e-02 0.000000e+00
76 8.650819e-02 0.000000e+00
77 1.234668e-01 0.000000e+00
78 1.521942e-01 0.000000e+00
79 1.678939e-01 0.000000e+00
80 1.678939e-01 0.000000e+00
81 1.521942e-01 0.000000e+00
82 1.234668e-01 0.000000e+00
83 8.650819e-02 0.000000e+00
84 4.726833e-02 0.000000e+00
85 1.167237e-02 0.000000e+00
86 -1.550444e-02 0.000000e+00
87 -3.146898e-02 0.000000e+00
88 -3.578333e-02 0.000000e+00
89 -3.022295e-02 0.000000e+00
90 -1.818304e-02 0.000000e+00
91 -3.794969e-03 0.000000e+00
92 9.029463e-03 0.000000e+00
93 1.740665e-02 0.000000e+00
94 1.997340e-02 0.000000e+00
95 1.698057e-02 0.000000e+00
96 1.000571e-02 0.000000e+00
97 1.391199e-03 0.000000e+00
98 -6.430376e-03 0.000000e+00
99 -1.154995e-02 0.000000e+00
100 -1.299272e-02 0.000000e+00
101 -1.084778e-02 0.000000e+00
102 -6.115665e-03 0.000000e+00
103 -3.434785e-04 0.000000e+00
104 4.837050e-03 0.000000e+00
105 8.140252e-03 0.000000e+00
106 8.919356e-03 0.000000e+00
107 7.261232e-03 0.000000e+00
108 3.880228e-03 0.000000e+00
109 -1.394092e-04 0.000000e+00
110 -3.668966e-03 0.000000e+00
111 -5.836136e-03 0.000000e+00
112 -6.225197e-03 0.000000e+00
113 -4.934943e-03 0.000000e+00
114 -2.494752e-03 0.000000e+00
115 3.234310e-04 0.000000e+00
116 2.736002e-03 0.000000e+00
117 4.155636e-03 0.000000e+00
118 4.323764e-03 0.000000e+00
119 3.340972e-03 0.000000e+00
120 1.598840e-03 0.000000e+00
121 -3.573537e-04 0.000000e+00
122 -1.990583e-03 0.000000e+00
123 -2.912737e-03 0.000000e+00
124 -2.969069e-03 0.000000e+00
125 -2.250679e-03 0.000000e+00
126 -1.040387e-03 0.000000e+00
127 2.854628e-04 0.000000e+00
128 1.367725e-03 0.000000e+00
129 1.956625e-03 0.000000e+00
130 1.963222e-03 0.000000e+00
131 1.461708e-03 0.000000e+00
132 6.483476e-04 0.000000e+00
133 -2.262541e-04 0.000000e+00
134 -9.300168e-04 0.000000e+00
135 -1.307358e-03 0.000000e+00
136 -1.308918e-03 0.000000e+00
137 -9.891580e-04 0.000000e+00
138 -4.771493e-04 0.000000e+00
139 6.783176e-05 0.000000e+00
140 5.019202e-04 0.000000e+00
141 7.312587e-04 0.000000e+00
142 7.281899e-04 0.000000e+00
143 5.277090e-04 0.000000e+00
144 2.083330e-04 0.000000e+00
145 -1.349516e-04 0.000000e+00
146 -4.172837e-04 0.000000e+00
147 -5.832354e-04 0.000000e+00
148 -6.150085e-04 0.000000e+00
149 -5.295104e-04 0.000000e+00
150 -3.673259e-04 0.000000e+00
151 -1.779232e-04 0.000000e+00
152 -5.723101e-06 0.000000e+00
153 1.193456e-04 0.000000e+00
154 1.854891e-04 0.000000e+00
155 1.973105e-04 0.000000e+00
156 1.701887e-04 0.000000e+00
157 1.234553e-04 0.000000e+00
158 7.453280e-05 0.000000e+00
159 4.032461e-05 0.000000e+00

View File

@ -0,0 +1,73 @@
# Begin data logger list
# Gain max.spfr mcld IIR(A,I)/FIR filter stages (not mandatory)
Ia: DigitizerModel="M24" M24-SC M24/BW
Ia: DigitizerModel="Q330" Q330/N Q330/HR Q330-SC
Ia: RecorderModel="M24" M24-SC M24/BW
Ia: RecorderModel="SeisComP" Q330-SC
Ia: RecorderModel="Q330" Q330/N Q330/HR
Ia: RecorderManufacturer="Quanterra" Q330/N Q330/HR
Ia: RecorderManufacturer="Lennartz" M24-SC M24/BW
Ia: RecorderManufacturer="Alpha2000" Q330-SC
Ia: DigitizerManufacturer="Quanterra" Q330/N Q330/HR Q330-SC
Ia: DigitizerManufacturer="Lennartz" M24-SC M24/BW
# Gain max.spfr mcld IIR(A,I)/FIR filter stages (not mandatory)
Dl: Q330/N 419430.0 100.0 0.0 Q330 200,100_1,50_2,40_3,20_4,1_5,0.1_5/10
Dl: Q330/HR 1677720.0 100.0 0.0 Q330 100_1,50_2,40_3,20_4,1_5,0.1_5/10
Dl: Q330-SC 419430.0 100.0 0.0 Q330 100_1,50_1/6,20_1/7,1_1/7/8/9,0.1_1/7/8/9/10
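# Per the column header: the first Dl line gives the Q330/N a gain of
# 419430.0 (counts/V), a maximum sample rate of 100.0 sps and zero max.
# clock drift; each "rate_stages" token (e.g. 100_1 or 1_1/7/8/9) selects
# the chain of Ff filter stages below that produces that output rate.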
#
# End data logger list
# FIR filter list for Quanterra Q330 digitizer and Seiscomp recorder
# Name Sym ncf inrate fac delay corrtn gain frg
Ff: Q330_FIR_1 q330_b100_100 A 65 0 100.0 1 0.041607 0.041607 1.0 0.0
Ff: Q330_FIR_2 q330_b100_50 A 81 0 50.0 1 0.531607 0.531607 1.0 0.0
Ff: Q330_FIR_3 q330_b100_40 A 39 0 40.0 1 0.430462 0.430462 1.0 0.0
Ff: Q330_FIR_4 q330_b100_20 A 67 0 20.0 1 1.630462 1.630462 1.0 0.0
Ff: Q330_FIR_5 q330_b100_1 A 31 0 1.0 1 15.930462 15.930462 1.0 0.0
Ff: Q330_FIR_6 scp_deci2.1 C 48 0 100.0 2 0.000 0.0 1.0 0.0
Ff: Q330_FIR_7 scp_deci5.1 C 80 0 100.0 5 0.000 0.0 1.0 0.0
Ff: Q330_FIR_8 scp_deci2.1 C 48 0 20.0 2 0.000 0.0 1.0 0.0
Ff: Q330_FIR_9 scp_deci10.1 C 200 0 10.0 10 0.000 0.0 1.0 0.0
Ff: Q330_FIR_10 scp_deci10.1 C 200 0 1.0 10 0.000 0.0 4.0 0.0
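# Per the column header: each Ff line maps a filter name to its coefficient
# file in the filter folder (e.g. q330_b100_100), with symmetry code,
# coefficient count, input rate, decimation factor, delay/correction in
# seconds, gain and gain frequency.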
# Digitizer IIR filter response list
#
# Digitizer analog response list
#
# Begin seismometer list
# Seismometer analog response list
# . Gain frgn Norm.fac fnr nz np Zeros&Poles
# Sensor type: VBB
Ia: Model="STS-2/CZ" STS-2/CZ
Ia: Model="STS-2/N" STS-2/N
Ia: Model="STS-2/G2" STS-2/G2
Ia: Model="STS-2/HG" STS-2/HG
Ia: Model="STS-2/G1" STS-2/G1
Ia: Model="STS-2/G3" STS-2/G3
Ia: Type="VBB" STS-2/CZ STS-2/N STS-2/G2 STS-2/HG STS-2/G3 STS-2/G1
Ia: Unit="M/S" STS-2/CZ STS-2/N STS-2/G2 STS-2/HG STS-2/G3 STS-2/G1
Ia: Manufacturer="Streckeisen" STS-2/CZ STS-2/N STS-2/G2 STS-2/HG STS-2/G3 STS-2/G1
Se: STS-2/N 1500.0 0.02 6.0077e7 1.0 2 5 2(0.0,0.0) (-0.037004,0.037016) (-0.037004,-0.037016) (-251.33,0.0) (-131.04,-467.29) (-131.04,467.29)
Se: STS-2/G1 1500.0 0.02 3.46844e17 1.0 5 9 2(0.0,0.0) (-15.15,0.0) (-318.6,401.2) (-318.6,-401.2) (-0.037,0.037) (-0.037,-0.037) (-15.99,0.0) (-100.9,401.9) (-100.9,-401.9) (-187.2,0.0) (-417.1,0.0) (-7454.0,7142.0) (-7454.0,-7142.0)
Se: STS-2/G2 1500.0 0.02 3.46844e17 1.0 9 14 2(0.0,0.0) (-10.75,0.0) (-294.6,0.0) (-555.1,0.0) (-683.9,175.5) (-683.9,-175.5) (-5907.0,3411.0) (-5907.0,-3411.0) (-0.037,0.037) (-0.037,-0.037) (-10.95,0.0) (-98.44,442.8) (-98.44,-442.8) (-251.1,0.0) (-556.8,60.0) (-556.8,-60.0) (-1391.0,0.0) (-4936.0,4713.0) (-4936.0,-4713.0) (-6227.0,0.0) (-6909.0,9208.0) (-6909.0,-9208.0)
Se: STS-2/G3 1500.0 0.02 3.46844e17 1.0 6 11 2(0.0,0.0) (-15.15,0.0) (-176.6,0.0) (-463.1,430.5) (-463.1,-430.5) (-0.037,0.037) (-0.037,-0.037) (-15.64,0.0) (-97.34,-400.7) (-97.34,400.7) (-255.1,0.0) (-374.8,0.0) (-520.3,0.0) (-10530.,10050.) (-10530.,-10050.) (-13300.,0.0)
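# Per the column header: each Se line lists the sensor gain, gain frequency,
# normalization factor and frequency, the zero/pole counts nz/np, and the
# zeros and poles as (real,imaginary) pairs; a leading multiplier such as
# "2(0.0,0.0)" denotes a repeated root.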
#Streckeisen_STS-2/HG> 20000.0 0.02 3.46844e17 1.0 6 11 2(0.0,0.0) (-15.15,0.0) (-176.6,0.0) (-463.1,430.5) (-463.1,430.5) (-0.037,0.037) (-0.037,-0.037) (-15.64,0.0) (-97.34,-400.7) (-97.34,400.7) (-255.1,0.0) (-374.8,0.0) (-520.3,0.0) (-10530.,10050.) (-10530.,-10050.) (-13300.,0.0)
#Streckeisen_STS-2/CZ> 1500.0 1.0 4.47172e2 1.0 6 7 2(0.0,0.0) (-15.1488,0.0) (-199.554,0.0) (-461.814,429.079) (-461.814,-429.079) (-0.03702,0.03702) (-0.03702,-0.03702) (-15.2744,0.0) (-82.8124,409.852) (-82.8124,-409.852) (-443.314,0.0) (-454.526,0.0)
# End seismometer list

View File

@ -0,0 +1,291 @@
#!/usr/bin/env python
###############################################################################
# Copyright (C) 2020 Helmholtz-Zentrum Potsdam - Deutsches
# GeoForschungsZentrum GFZ
#
# License: GPL Affero General Public License (GNU AGPL) version 3.0
# Author: Peter L. Evans
# E-mail: <pevans@gfz-potsdam.de>
#
###############################################################################
from __future__ import print_function
from nettab.tab import Tab
import json
import os
import sys
import tempfile
import unittest
# Needed only to dump the XML output:
try:
import seiscomp.io as IO
except ImportError:
print('Failed to import seiscomp.io module, trying seiscomp3.IO instead')
from seiscomp3 import IO
# Just to examine the output XML:
import xml.etree.ElementTree as ET
def xmlparse(filename):
parser = ET.XMLParser()
with open(filename) as f:
parser.feed(f.read())
elem = parser.close()
ns = '{http://geofon.gfz-potsdam.de/ns/seiscomp3-schema/0.11}'
return (elem, ns)
class TestTab(unittest.TestCase):
simpleTab = '''
Nw: QQ 2020-04-01
Na: Description="Atlantis Seismic Network"
Sl: AA01 "Pillars of Hercules/Atlantis" Q330/N%xxxx STS-2/N%yyyy 100/20 ZNE 30.0 -15.0 -900 2.0 2020-04-02
'''
tabWithPid = '''
Nw: QQ 2020-04-01
Na: Description="Atlantis Seismic Network"
Na: Pid="doi:10.1234/xyz"
Sl: AA01 "Pillars of Hercules/Atlantis" Q330/N%xxxx STS-2/N%yyyy 100/20 ZNE 30.0 -15.0 -900 2.0 2020-04-02
'''
instFile = 'small-inst.db'
templateTab = '''
Nw: {nwline}
Na: {naline}
Sl: {slline}
'''
def _writeTempTab(self, tabText):
'''Put a nettab formatted string into a temporary file,
returning the file name.
'''
with tempfile.NamedTemporaryFile('w', delete=False) as tab:
print(tabText, file=tab)
return tab.name
def _writeInvXML(self, inv, filename='something.xml'):
'''Copied from tab2inv.py'''
ar = IO.XMLArchive()
print("Generating file: %s" % filename,
file=sys.stderr)
ar.create(filename)
ar.setFormattedOutput(True)
ar.setCompression(False)
ar.writeObject(inv)
ar.close()
def _writeNewInvXML(self, sc3inv, filename):
try:
os.unlink(filename)
except OSError: # Python3: Catch FileNotFoundError instead.
pass
self._writeInvXML(sc3inv, filename)
def test_1(self):
'''Create object'''
t = Tab()
print('Expect: "Warning, no filter folder supplied."',
file=sys.stderr)
def test_2_filter(self):
'''Provide a (trivial, non-useful) filter folder'''
t = Tab(None, None, '.', None, None)
def test_2_defaults_warning(self):
'''Provide and load a defaults file'''
defaults = tempfile.NamedTemporaryFile('w', delete=False)
print('''
Nw: QQ 2001/001
''', file=defaults)
defaultsFile = defaults.name
defaults.close()
t = Tab(None, defaultsFile, '.', None, None)
os.unlink(defaultsFile)
print("Expect: 'Warning: Defaults file can only contain attributes'",
file=sys.stderr)
def test_2_defaults_attributes(self):
'''Provide and load a defaults file'''
defaults = tempfile.NamedTemporaryFile('w', delete=False)
print('''
Na: Foo=bar
Sa: StationFoo=bla * *
Ia: InstrumentFoo=blu *
''', file=defaults)
defaultsFile = defaults.name
defaults.close()
t = Tab(None, defaultsFile, '.', None, None)
os.unlink(defaultsFile)
def test_3_digest(self):
tabFile = self._writeTempTab(self.simpleTab)
t = Tab(None, None, '.', None, None)
t.digest(tabFile)
os.unlink(tabFile)
def SKIPtest_3_digest_check(self):
tabFile = self._writeTempTab(self.simpleTab)
t = Tab(None, None, 'filters', None, None)
t.digest(tabFile)
t.digest(self.instFile)
t.check()
os.unlink(tabFile)
def test_4_digest_twice(self):
'''Exception is raised by digesting twice.'''
tabFile = self._writeTempTab(self.simpleTab)
t = Tab(None, None, '.', None, None)
t.digest(tabFile)
with self.assertRaises(Exception):
t.digest(tabFile)
# print('Expect: "Warning: File {name} is already digested."')
os.unlink(tabFile)
def test_5_na_after_sa(self):
'''Not allowed to provide Na lines after a Sl line'''
s = '\n'.join([self.simpleTab, 'Na: Pid=10.123/xyz'])
tabFile = self._writeTempTab(s)
t = Tab(None, None, '.', None, None)
with self.assertRaises(Exception):
t.digest(tabFile)
# print('Expect "No Na lines after a Sl line.',
# 'Network has already been defined."')
os.unlink(tabFile)
def test_6_network_pid(self):
'''Key 'Pid' is an allowed network attribute'''
tabString = '''
Nw: QQ 2001/001
Na: Region=Atlantis
Na: Pid=10.123/xyz
'''
tabFile = self._writeTempTab(tabString)
t = Tab(None, None, '.', None, None)
t.digest(tabFile)
os.unlink(tabFile)
def test_6_network_pid_check(self):
'''No problem to define extra unhandled attributes'''
tabString = '''
Nw: QQ 2001/001
Na: Region=Atlantis
Na: Pid=10.123/xyz
Na: Foo=bar
'''
tabFile = self._writeTempTab(tabString)
t = Tab(None, None, '.', None, None)
t.digest(tabFile)
t.check()
os.unlink(tabFile)
def test_7_sc3Obj(self):
'''Call sc3Obj with a trivial t'''
t = Tab(None, None, '.', None, None)
sc3inv = t.sc3Obj()
def test_8_network_sc3Obj(self):
'''Call sc3Obj with an actual network, write XML'''
tabFile = self._writeTempTab(self.simpleTab)
t = Tab(None, None, 'filters', None, None)
t.digest(tabFile)
t.digest(self.instFile)
sc3inv = t.sc3Obj()
# Returns ok, but reports inst.db errors and warnings to stdout.
self.assertTrue(sc3inv)
outFile = '/tmp/testTabInv.xml'
try:
os.unlink(outFile)
except OSError: # Python3: Catch FileNotFoundError instead.
pass
self._writeInvXML(sc3inv, filename=outFile)
self.assertTrue(os.path.exists(outFile))
# Further checks: that the file contains a network, etc.
def test_9_network_pid_sc3Obj(self):
'''Load a network with PID, write XML, confirm PID is there.
Older nettabs reported 'ignoring attribute Pid'.
'''
tabFile = self._writeTempTab(self.tabWithPid)
t = Tab(None, None, 'filters', None, None)
t.digest(tabFile)
t.digest(self.instFile)
sc3inv = t.sc3Obj()
self.assertTrue(sc3inv)
outFile = '/tmp/testTabInvPid.xml'
self._writeNewInvXML(sc3inv, outFile)
self.assertTrue(os.path.exists(outFile))
# Check that the file contains exactly one network comment
# which is a JSON string with PID.
# e.g. '{"type": "DOI", "value": "10.1234/xsdfa"}'
(elem, ns) = xmlparse(outFile)
for e in elem:
for f in e:
if f.tag == ns + 'network':
g = f.findall(ns + 'comment')
self.assertTrue(len(g) == 1)
texts = g[0].findall(ns + 'text')
j = json.loads(texts[0].text)
self.assertEqual(j['type'], 'DOI')
self.assertEqual(j['value'], '10.1234/xyz')
# self.assertEqual(texts[0].text, 'doi:10.1234/xyz')
def test_10_network_comment(self):
tabString = '''
Nw: NN 2020/092
Na: Region=Atlantis
Na: Comment="This is commentary"
Na: Remark="Remarkable!"
Sl: AA01 "Zeus" Q330/N%xxxx STS-2/N%yyyy 20 Z 30 -15 -2 2.0 2020/093
'''
tabFile = self._writeTempTab(tabString)
t = Tab(None, None, 'filters', None, None)
t.digest(tabFile)
t.digest(self.instFile)
t.check()
os.unlink(tabFile)
sc3inv = t.sc3Obj()
self.assertTrue(sc3inv)
outFile = '/tmp/testTabInvComment.xml'
self._writeNewInvXML(sc3inv, outFile)
self.assertTrue(os.path.exists(outFile))
# Further checks: that the file contains a network comment. TODO
(elem, ns) = xmlparse(outFile)
for e in elem:
for f in e:
if f.tag == ns + 'network':
g = f.findall(ns + 'comment')
self.assertTrue(len(g) == 1)
# DEBUG print('DEBUG Network comment found:',
# g[0].findall(ns + 'text')[0].text)
if __name__ == '__main__':
unittest.main(verbosity=1)

View File

@ -0,0 +1,4 @@
import os
import sys
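# Presumably RTLD_LAZY | RTLD_GLOBAL is set so that the C++ symbols of the
# SeisComP core libraries are shared across the SWIG extension modules
# loaded afterwards.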
sys.setdlopenflags(os.RTLD_LAZY | os.RTLD_GLOBAL)

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
lib/python/seiscomp/_geo.so Normal file

Binary file not shown.

BIN
lib/python/seiscomp/_io.so Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,560 @@
############################################################################
# Copyright (C) gempa GmbH #
# All rights reserved. #
# Contact: gempa GmbH (seiscomp-dev@gempa.de) #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
# #
# Other Usage #
# Alternatively, this file may be used in accordance with the terms and #
# conditions contained in a signed written agreement between you and #
# gempa GmbH. #
############################################################################
import os, time, sys
import seiscomp.core, seiscomp.client, seiscomp.datamodel
import seiscomp.io, seiscomp.system
def collectParams(container):
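"""Recursively collect all parameters of a binding container, descending
into its groups and structures, as a name -> value dict."""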
params = {}
for i in range(container.groupCount()):
params.update(collectParams(container.group(i)))
for i in range(container.structureCount()):
params.update(collectParams(container.structure(i)))
for i in range(container.parameterCount()):
p = container.parameter(i)
if p.symbol.stage == seiscomp.system.Environment.CS_UNDEFINED:
continue
params[p.variableName] = ",".join(p.symbol.values)
return params
def collect(idset, paramSetID):
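"""Mark the given parameter set and, recursively, all of its base
parameter sets as used in idset."""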
paramSet = seiscomp.datamodel.ParameterSet.Find(paramSetID)
if not paramSet:
return
idset[paramSet.publicID()] = 1
if not paramSet.baseID():
return
collect(idset, paramSet.baseID())
def sync(paramSet, params):
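"""Synchronize paramSet with the params dict: update changed values,
remove duplicate and obsolete parameters and add new ones."""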
obsoleteParams = []
seenParams = {}
i = 0
while i < paramSet.parameterCount():
p = paramSet.parameter(i)
if p.name() in params:
if p.name() in seenParams:
# Multiple parameter definitions with same name
sys.stderr.write(
f"- {p.publicID()}:{p.name()} / duplicate parameter name\n"
)
p.detach()
continue
seenParams[p.name()] = 1
val = params[p.name()]
if val != p.value():
p.setValue(val)
p.update()
else:
obsoleteParams.append(p)
i = i + 1
for p in obsoleteParams:
p.detach()
for key, val in list(params.items()):
if key in seenParams:
continue
p = seiscomp.datamodel.Parameter.Create()
p.setName(key)
p.setValue(val)
paramSet.add(p)
class ConfigDBUpdater(seiscomp.client.Application):
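"""Application which synchronizes the bindings read from the key
directory with the configuration database or, alternatively, writes
them to a configuration XML file."""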
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setLoggingToStdErr(True)
self.setMessagingEnabled(True)
self.setDatabaseEnabled(True, True)
self.setAutoApplyNotifierEnabled(False)
self.setInterpretNotifierEnabled(False)
self.setMessagingUsername("_sccfgupd_")
self.setLoadConfigModuleEnabled(True)
# Load all configuration modules
self.setConfigModuleName("")
self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)
self._moduleName = None
self._outputFile = None
self._createNotifier = False
self._keyDir = None
def createCommandLineDescription(self):
self.commandline().addGroup("Input")
self.commandline().addStringOption(
"Input",
"key-dir",
"Overrides the location of the default key directory ($SEISCOMP_ROOT/etc/key)",
)
self.commandline().addGroup("Output")
self.commandline().addStringOption(
"Output", "module-name", "The module name to be used for the config module. If not given then the application name is being used or 'trunk' if output to a file is enabled"
)
self.commandline().addStringOption(
"Output", "output,o", "If given, an output XML file is generated"
)
self.commandline().addOption(
"Output", "create-notifier", "If given then a notifier message containing all notifiers "
"will be written to the output XML. This option only applies "
"if an output file is given. Notifier creation either requires "
"and input database and an input config XML as reference."
)
def validateParameters(self):
if not seiscomp.client.Application.validateParameters(self):
return False
try:
self._moduleName = self.commandline().optionString("module-name")
except Exception:
pass
try:
self._outputFile = self.commandline().optionString("output")
self._createNotifier = self.commandline().hasOption("create-notifier")
# Switch to offline mode
self.setMessagingEnabled(False)
self.setDatabaseEnabled(False, False)
if self._createNotifier:
if self.isConfigDatabaseEnabled():
self.setDatabaseEnabled(True, False)
else:
self.setLoadConfigModuleEnabled(False)
except Exception:
pass
try:
self._keyDir = self.commandline().optionString("key-dir")
except Exception:
pass
return True
def init(self):
if not seiscomp.client.Application.init(self):
return False
# Initialize the basic directories
filebase = seiscomp.system.Environment.Instance().installDir()
descdir = os.path.join(filebase, "etc", "descriptions")
# Load definitions of the configuration schema
defs = seiscomp.system.SchemaDefinitions()
if not defs.load(descdir):
print("Error: could not read descriptions", file=sys.stderr)
return False
if defs.moduleCount() == 0:
print("Warning: no modules defined, nothing to do", file=sys.stderr)
return False
# Create a model from the schema and read its configuration including
# all bindings.
model = seiscomp.system.Model()
if self._keyDir:
model.keyDirOverride = self._keyDir
model.create(defs)
model.readConfig()
# Find all binding mods for trunk. Bindings of modules where standalone
# is set to true are ignored. They are supposed to handle their bindings
# on their own.
self.bindingMods = []
for i in range(defs.moduleCount()):
mod = defs.module(i)
# Ignore stand alone modules (eg seedlink, slarchive, ...) as they
# are not using the trunk libraries and don't need database
# configurations
if mod.isStandalone():
continue
self.bindingMods.append(mod.name)
if len(self.bindingMods) == 0:
print("Warning: no usable modules found, nothing to do", file=sys.stderr)
return False
self.stationSetups = {}
# Read bindings
for m in self.bindingMods:
mod = model.module(m)
if not mod:
print(f"Warning: module {m} not assigned", file=sys.stderr)
continue
if len(mod.bindings) == 0:
continue
if len(m) > 20:
print(
f"Error: rejecting module {m} - name is longer than 20 characters",
file=sys.stderr,
)
return False
# Rename global to default for being compatible with older
# releases
if m == "global":
m = "default"
print(f"+ {m}", file=sys.stderr)
for staid in list(mod.bindings.keys()):
binding = mod.getBinding(staid)
if not binding:
continue
# sys.stderr.write(" + %s.%s\n" % (staid.networkCode, staid.stationCode))
params = {}
for i in range(binding.sectionCount()):
params.update(collectParams(binding.section(i)))
key = (staid.networkCode, staid.stationCode)
if key not in self.stationSetups:
self.stationSetups[key] = {}
self.stationSetups[key][m] = params
print(
f" + read {len(list(mod.bindings.keys()))} stations", file=sys.stderr
)
return True
def printUsage(self):
print(
"""Usage:
bindings2cfg [options]
Synchronize bindings from key files with processing system or output as
configuration XML file"""
)
seiscomp.client.Application.printUsage(self)
print(
"""Examples:
Write bindings configuration from key directory to a configuration XML file:
bindings2cfg --key-dir ./etc/key -o config.xml
Synchronize bindings configuration from key directory to a processing system
bindings2cfg --key-dir ./etc/key -H proc
"""
)
return True
def send(self, *args):
"""
A simple wrapper that sends a message and tries to resend it in case of
an error.
"""
while not self.connection().send(*args):
print("Warning: sending failed, retrying", file=sys.stderr)
time.sleep(1)
def run(self):
"""
Reimplements the main loop of the application. This method collects
all bindings and updates the database. It searches for already existing
objects and updates them or creates new objects. Objects that were not
touched are removed. This tool is the only one that should write the
configuration into the database and thus manages the content.
"""
config = seiscomp.client.ConfigDB.Instance().config()
if config is None:
config = seiscomp.datamodel.Config()
configMod = None
obsoleteConfigMods = []
moduleName = self._moduleName
if self._outputFile is None or self._createNotifier:
if not moduleName:
moduleName = self.name()
seiscomp.datamodel.Notifier.Enable()
else:
if not moduleName:
moduleName = "trunk"
configID = f"Config/{moduleName}"
for i in range(config.configModuleCount()):
if config.configModule(i).publicID() != configID:
obsoleteConfigMods.append(config.configModule(i))
else:
configMod = config.configModule(i)
# Remove obsolete config modules
for cm in obsoleteConfigMods:
print(f"- {cm.name()} / obsolete module configuration", file=sys.stderr)
ps = seiscomp.datamodel.ParameterSet.Find(cm.parameterSetID())
if ps is not None:
ps.detach()
cm.detach()
del obsoleteConfigMods
if not configMod:
configMod = seiscomp.datamodel.ConfigModule.Find(configID)
if configMod is None:
configMod = seiscomp.datamodel.ConfigModule.Create(configID)
config.add(configMod)
else:
if configMod.name() != moduleName:
configMod.update()
if not configMod.enabled():
configMod.update()
configMod.setName(moduleName)
configMod.setEnabled(True)
else:
if configMod.name() != moduleName:
configMod.setName(moduleName)
configMod.update()
paramSet = seiscomp.datamodel.ParameterSet.Find(configMod.parameterSetID())
if configMod.parameterSetID():
configMod.setParameterSetID("")
configMod.update()
if paramSet is not None:
paramSet.detach()
stationConfigs = {}
obsoleteStationConfigs = []
for i in range(configMod.configStationCount()):
cs = configMod.configStation(i)
if (cs.networkCode(), cs.stationCode()) in self.stationSetups:
stationConfigs[(cs.networkCode(), cs.stationCode())] = cs
else:
obsoleteStationConfigs.append(cs)
for cs in obsoleteStationConfigs:
print(
f"- {configMod.name()}/{cs.networkCode()}/{cs.stationCode()} / obsolete "
"station configuration",
file=sys.stderr,
)
cs.detach()
del obsoleteStationConfigs
for staid, setups in list(self.stationSetups.items()):
try:
cs = stationConfigs[staid]
except KeyError:
cs = seiscomp.datamodel.ConfigStation.Find(
f"Config/{configMod.name()}/{staid[0]}/{staid[1]}"
)
if not cs:
cs = seiscomp.datamodel.ConfigStation.Create(
f"Config/{configMod.name()}/{staid[0]}/{staid[1]}"
)
configMod.add(cs)
cs.setNetworkCode(staid[0])
cs.setStationCode(staid[1])
cs.setEnabled(True)
ci = seiscomp.datamodel.CreationInfo()
ci.setCreationTime(seiscomp.core.Time.GMT())
ci.setAgencyID(self.agencyID())
ci.setAuthor(self.name())
cs.setCreationInfo(ci)
stationSetups = {}
obsoleteSetups = []
for i in range(cs.setupCount()):
setup = cs.setup(i)
if setup.name() in setups:
stationSetups[setup.name()] = setup
else:
obsoleteSetups.append(setup)
for s in obsoleteSetups:
print(
f"- {configMod.name()}/{cs.networkCode()}/{cs.stationCode()}/{setup.name()} "
"/ obsolete station setup",
file=sys.stderr,
)
ps = seiscomp.datamodel.ParameterSet.Find(s.parameterSetID())
if ps:
ps.detach()
s.detach()
del obsoleteSetups
newParamSets = {}
globalSet = ""
for mod, params in list(setups.items()):
try:
setup = stationSetups[mod]
except KeyError:
setup = seiscomp.datamodel.Setup()
setup.setName(mod)
setup.setEnabled(True)
cs.add(setup)
paramSet = seiscomp.datamodel.ParameterSet.Find(setup.parameterSetID())
if not paramSet:
paramSet = seiscomp.datamodel.ParameterSet.Find(
"ParameterSet/%s/Station/%s/%s/%s"
% (
configMod.name(),
cs.networkCode(),
cs.stationCode(),
setup.name(),
)
)
if not paramSet:
paramSet = seiscomp.datamodel.ParameterSet.Create(
"ParameterSet/%s/Station/%s/%s/%s"
% (
configMod.name(),
cs.networkCode(),
cs.stationCode(),
setup.name(),
)
)
config.add(paramSet)
paramSet.setModuleID(configMod.publicID())
paramSet.setCreated(seiscomp.core.Time.GMT())
newParamSets[paramSet.publicID()] = 1
setup.setParameterSetID(paramSet.publicID())
if mod in stationSetups:
setup.update()
elif paramSet.moduleID() != configMod.publicID():
paramSet.setModuleID(configMod.publicID())
paramSet.update()
# Synchronize existing parameterset with the new parameters
sync(paramSet, params)
if setup.name() == "default":
globalSet = paramSet.publicID()
for i in range(cs.setupCount()):
setup = cs.setup(i)
paramSet = seiscomp.datamodel.ParameterSet.Find(setup.parameterSetID())
if not paramSet:
continue
if paramSet.publicID() != globalSet and paramSet.baseID() != globalSet:
paramSet.setBaseID(globalSet)
if paramSet.publicID() not in newParamSets:
paramSet.update()
# Collect unused ParameterSets
usedSets = {}
for i in range(config.configModuleCount()):
configMod = config.configModule(i)
for j in range(configMod.configStationCount()):
cs = configMod.configStation(j)
for k in range(cs.setupCount()):
setup = cs.setup(k)
collect(usedSets, setup.parameterSetID())
# Delete unused ParameterSets
i = 0
while i < config.parameterSetCount():
paramSet = config.parameterSet(i)
if paramSet.publicID() not in usedSets:
print(
f"- {paramSet.publicID()} / obsolete parameter set", file=sys.stderr
)
paramSet.detach()
else:
i = i + 1
# Generate output file and exit if configured
if self._outputFile is not None:
ar = seiscomp.io.XMLArchive()
if not ar.create(self._outputFile):
print(
f"Failed to created output file: {self._outputFile}",
file=sys.stderr,
)
return False
ar.setFormattedOutput(True)
if self._createNotifier:
nmsg = seiscomp.datamodel.Notifier.GetMessage(True)
ar.writeObject(nmsg)
else:
ar.writeObject(config)
ar.close()
return True
ncount = seiscomp.datamodel.Notifier.Size()
if ncount > 0:
print(f"+ synchronize {ncount} change(s)", file=sys.stderr)
else:
print("- database is already up-to-date", file=sys.stderr)
return True
cfgmsg = seiscomp.datamodel.ConfigSyncMessage(False)
cfgmsg.setCreationInfo(seiscomp.datamodel.CreationInfo())
cfgmsg.creationInfo().setCreationTime(seiscomp.core.Time.GMT())
cfgmsg.creationInfo().setAuthor(self.author())
cfgmsg.creationInfo().setAgencyID(self.agencyID())
self.send(seiscomp.client.Protocol.STATUS_GROUP, cfgmsg)
# Send messages in a batch of 100 notifiers to not exceed the
# maximum allowed message size of ~300kb.
msg = seiscomp.datamodel.NotifierMessage()
nmsg = seiscomp.datamodel.Notifier.GetMessage(False)
count = 0
sys.stderr.write("\r + sending notifiers: %d%%" % (count * 100 / ncount))
sys.stderr.flush()
while nmsg:
for o in nmsg:
n = seiscomp.datamodel.Notifier.Cast(o)
if n:
msg.attach(n)
if msg.size() >= 100:
count += msg.size()
self.send("CONFIG", msg)
msg.clear()
sys.stderr.write(
"\r + sending notifiers: %d%%" % (count * 100 / ncount)
)
sys.stderr.flush()
nmsg = seiscomp.datamodel.Notifier.GetMessage(False)
if msg.size() > 0:
count += msg.size()
self.send("CONFIG", msg)
msg.clear()
sys.stderr.write("\r + sending notifiers: %d%%" % (count * 100 / ncount))
sys.stderr.flush()
sys.stderr.write("\n")
# Notify about end of synchronization
cfgmsg.creationInfo().setCreationTime(seiscomp.core.Time.GMT())
cfgmsg.isFinished = True
self.send(seiscomp.client.Protocol.STATUS_GROUP, cfgmsg)
return True
def main():
app = ConfigDBUpdater(len(sys.argv), sys.argv)
return app()

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,857 @@
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 4.0.2
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info < (2, 7, 0):
raise RuntimeError("Python 2.7 or later required")
# Import the low-level C/C++ module
if __package__ or "." in __name__:
from . import _config
else:
import _config
try:
import builtins as __builtin__
except ImportError:
import __builtin__
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
def _swig_setattr_nondynamic_instance_variable(set):
def set_instance_attr(self, name, value):
if name == "thisown":
self.this.own(value)
elif name == "this":
set(self, name, value)
elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
set(self, name, value)
else:
raise AttributeError("You cannot add instance attributes to %s" % self)
return set_instance_attr
def _swig_setattr_nondynamic_class_variable(set):
def set_class_attr(cls, name, value):
if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
set(cls, name, value)
else:
raise AttributeError("You cannot add class attributes to %s" % cls)
return set_class_attr
def _swig_add_metaclass(metaclass):
"""Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
def wrapper(cls):
return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
return wrapper
class _SwigNonDynamicMeta(type):
"""Meta class to enforce nondynamic attributes (no new attributes) for a class"""
__setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
import weakref
class SwigPyIterator(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
__swig_destroy__ = _config.delete_SwigPyIterator
def value(self):
return _config.SwigPyIterator_value(self)
def incr(self, n=1):
return _config.SwigPyIterator_incr(self, n)
def decr(self, n=1):
return _config.SwigPyIterator_decr(self, n)
def distance(self, x):
return _config.SwigPyIterator_distance(self, x)
def equal(self, x):
return _config.SwigPyIterator_equal(self, x)
def copy(self):
return _config.SwigPyIterator_copy(self)
def next(self):
return _config.SwigPyIterator_next(self)
def __next__(self):
return _config.SwigPyIterator___next__(self)
def previous(self):
return _config.SwigPyIterator_previous(self)
def advance(self, n):
return _config.SwigPyIterator_advance(self, n)
def __eq__(self, x):
return _config.SwigPyIterator___eq__(self, x)
def __ne__(self, x):
return _config.SwigPyIterator___ne__(self, x)
def __iadd__(self, n):
return _config.SwigPyIterator___iadd__(self, n)
def __isub__(self, n):
return _config.SwigPyIterator___isub__(self, n)
def __add__(self, n):
return _config.SwigPyIterator___add__(self, n)
def __sub__(self, *args):
return _config.SwigPyIterator___sub__(self, *args)
def __iter__(self):
return self
# Register SwigPyIterator in _config:
_config.SwigPyIterator_swigregister(SwigPyIterator)
ERROR = _config.ERROR
WARNING = _config.WARNING
INFO = _config.INFO
DEBUG = _config.DEBUG
class Logger(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
__swig_destroy__ = _config.delete_Logger
def log(self, arg0, filename, line, msg):
return _config.Logger_log(self, arg0, filename, line, msg)
def __init__(self):
if self.__class__ == Logger:
_self = None
else:
_self = self
_config.Logger_swiginit(self, _config.new_Logger(_self, ))
def __disown__(self):
self.this.disown()
_config.disown_Logger(self)
return weakref.proxy(self)
# Register Logger in _config:
_config.Logger_swigregister(Logger)
class Exception(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self, *args):
_config.Exception_swiginit(self, _config.new_Exception(*args))
__swig_destroy__ = _config.delete_Exception
def what(self):
return _config.Exception_what(self)
# Register Exception in _config:
_config.Exception_swigregister(Exception)
cvar = _config.cvar
class OptionNotFoundException(Exception):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self, *args):
_config.OptionNotFoundException_swiginit(self, _config.new_OptionNotFoundException(*args))
__swig_destroy__ = _config.delete_OptionNotFoundException
# Register OptionNotFoundException in _config:
_config.OptionNotFoundException_swigregister(OptionNotFoundException)
class TypeConversionException(Exception):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self, *args):
_config.TypeConversionException_swiginit(self, _config.new_TypeConversionException(*args))
__swig_destroy__ = _config.delete_TypeConversionException
# Register TypeConversionException in _config:
_config.TypeConversionException_swigregister(TypeConversionException)
class SyntaxException(Exception):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self, *args):
_config.SyntaxException_swiginit(self, _config.new_SyntaxException(*args))
__swig_destroy__ = _config.delete_SyntaxException
# Register SyntaxException in _config:
_config.SyntaxException_swigregister(SyntaxException)
class CaseSensitivityException(Exception):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self, *args):
_config.CaseSensitivityException_swiginit(self, _config.new_CaseSensitivityException(*args))
__swig_destroy__ = _config.delete_CaseSensitivityException
# Register CaseSensitivityException in _config:
_config.CaseSensitivityException_swigregister(CaseSensitivityException)
class Symbol(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self, *args):
_config.Symbol_swiginit(self, _config.new_Symbol(*args))
def set(self, name, ns, values, uri, comment, stage=-1):
return _config.Symbol_set(self, name, ns, values, uri, comment, stage)
def __eq__(self, symbol):
return _config.Symbol___eq__(self, symbol)
def toString(self):
return _config.Symbol_toString(self)
name = property(_config.Symbol_name_get, _config.Symbol_name_set)
ns = property(_config.Symbol_ns_get, _config.Symbol_ns_set)
content = property(_config.Symbol_content_get, _config.Symbol_content_set)
values = property(_config.Symbol_values_get, _config.Symbol_values_set)
uri = property(_config.Symbol_uri_get, _config.Symbol_uri_set)
comment = property(_config.Symbol_comment_get, _config.Symbol_comment_set)
stage = property(_config.Symbol_stage_get, _config.Symbol_stage_set)
line = property(_config.Symbol_line_get, _config.Symbol_line_set)
__swig_destroy__ = _config.delete_Symbol
# Register Symbol in _config:
_config.Symbol_swigregister(Symbol)
class SymbolTable(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self):
_config.SymbolTable_swiginit(self, _config.new_SymbolTable())
def setCaseSensitivityCheck(self, arg2):
return _config.SymbolTable_setCaseSensitivityCheck(self, arg2)
def setLogger(self, arg2):
return _config.SymbolTable_setLogger(self, arg2)
def logger(self):
return _config.SymbolTable_logger(self)
def add(self, *args):
return _config.SymbolTable_add(self, *args)
def get(self, *args):
return _config.SymbolTable_get(self, *args)
def remove(self, name):
return _config.SymbolTable_remove(self, name)
def incrementObjectCount(self):
return _config.SymbolTable_incrementObjectCount(self)
def decrementObjectCount(self):
return _config.SymbolTable_decrementObjectCount(self)
def objectCount(self):
return _config.SymbolTable_objectCount(self)
def toString(self):
return _config.SymbolTable_toString(self)
def hasFileBeenIncluded(self, fileName):
return _config.SymbolTable_hasFileBeenIncluded(self, fileName)
def addToIncludedFiles(self, fileName):
return _config.SymbolTable_addToIncludedFiles(self, fileName)
def includesBegin(self):
return _config.SymbolTable_includesBegin(self)
def includesEnd(self):
return _config.SymbolTable_includesEnd(self)
def begin(self):
return _config.SymbolTable_begin(self)
def end(self):
return _config.SymbolTable_end(self)
__swig_destroy__ = _config.delete_SymbolTable
# Register SymbolTable in _config:
_config.SymbolTable_swigregister(SymbolTable)
class Config(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self):
_config.Config_swiginit(self, _config.new_Config())
__swig_destroy__ = _config.delete_Config
def setCaseSensitivityCheck(self, arg2):
return _config.Config_setCaseSensitivityCheck(self, arg2)
def readConfig(self, file, stage=-1, raw=False):
return _config.Config_readConfig(self, file, stage, raw)
def writeConfig(self, *args):
return _config.Config_writeConfig(self, *args)
def setLogger(self, logger):
return _config.Config_setLogger(self, logger)
def symbolsToString(self):
return _config.Config_symbolsToString(self)
def names(self):
return _config.Config_names(self)
def visitedFilesToString(self):
return _config.Config_visitedFilesToString(self)
def getInt(self, *args):
return _config.Config_getInt(self, *args)
def setInt(self, name, value):
return _config.Config_setInt(self, name, value)
def getDouble(self, *args):
return _config.Config_getDouble(self, *args)
def setDouble(self, name, value):
return _config.Config_setDouble(self, name, value)
def getBool(self, *args):
return _config.Config_getBool(self, *args)
def setBool(self, name, value):
return _config.Config_setBool(self, name, value)
def getString(self, *args):
return _config.Config_getString(self, *args)
def setString(self, name, value):
return _config.Config_setString(self, name, value)
def remove(self, name):
return _config.Config_remove(self, name)
def getInts(self, *args):
return _config.Config_getInts(self, *args)
def setInts(self, name, values):
return _config.Config_setInts(self, name, values)
def getDoubles(self, *args):
return _config.Config_getDoubles(self, *args)
def setDoubles(self, name, values):
return _config.Config_setDoubles(self, name, values)
def getBools(self, *args):
return _config.Config_getBools(self, *args)
def setBools(self, name, values):
return _config.Config_setBools(self, name, values)
def getStrings(self, *args):
return _config.Config_getStrings(self, *args)
def setStrings(self, name, values):
return _config.Config_setStrings(self, name, values)
def symbolTable(self):
return _config.Config_symbolTable(self)
def eval(self, rvalue, result, resolveReferences=True, errmsg=None):
return _config.Config_eval(self, rvalue, result, resolveReferences, errmsg)
@staticmethod
def Eval(rvalue, result, resolveReferences=True, symtab=None, errmsg=None):
return _config.Config_Eval(rvalue, result, resolveReferences, symtab, errmsg)
@staticmethod
def writeValues(os, symbol, multilineLists=False):
return _config.Config_writeValues(os, symbol, multilineLists)
@staticmethod
def writeContent(os, symbol, multilineLists=False):
return _config.Config_writeContent(os, symbol, multilineLists)
@staticmethod
def writeSymbol(os, symbol, multilineLists=False):
return _config.Config_writeSymbol(os, symbol, multilineLists)
@staticmethod
def escapeIdentifier(arg1):
return _config.Config_escapeIdentifier(arg1)
def trackVariables(self, enabled):
return _config.Config_trackVariables(self, enabled)
def getVariables(self):
return _config.Config_getVariables(self)
def escape(self, arg2):
return _config.Config_escape(self, arg2)
# Register Config in _config:
_config.Config_swigregister(Config)
def Config_Eval(rvalue, result, resolveReferences=True, symtab=None, errmsg=None):
return _config.Config_Eval(rvalue, result, resolveReferences, symtab, errmsg)
def Config_writeValues(os, symbol, multilineLists=False):
return _config.Config_writeValues(os, symbol, multilineLists)
def Config_writeContent(os, symbol, multilineLists=False):
return _config.Config_writeContent(os, symbol, multilineLists)
def Config_writeSymbol(os, symbol, multilineLists=False):
return _config.Config_writeSymbol(os, symbol, multilineLists)
def Config_escapeIdentifier(arg1):
return _config.Config_escapeIdentifier(arg1)
class VectorStr(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def iterator(self):
return _config.VectorStr_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
return _config.VectorStr___nonzero__(self)
def __bool__(self):
return _config.VectorStr___bool__(self)
def __len__(self):
return _config.VectorStr___len__(self)
def __getslice__(self, i, j):
return _config.VectorStr___getslice__(self, i, j)
def __setslice__(self, *args):
return _config.VectorStr___setslice__(self, *args)
def __delslice__(self, i, j):
return _config.VectorStr___delslice__(self, i, j)
def __delitem__(self, *args):
return _config.VectorStr___delitem__(self, *args)
def __getitem__(self, *args):
return _config.VectorStr___getitem__(self, *args)
def __setitem__(self, *args):
return _config.VectorStr___setitem__(self, *args)
def pop(self):
return _config.VectorStr_pop(self)
def append(self, x):
return _config.VectorStr_append(self, x)
def empty(self):
return _config.VectorStr_empty(self)
def size(self):
return _config.VectorStr_size(self)
def swap(self, v):
return _config.VectorStr_swap(self, v)
def begin(self):
return _config.VectorStr_begin(self)
def end(self):
return _config.VectorStr_end(self)
def rbegin(self):
return _config.VectorStr_rbegin(self)
def rend(self):
return _config.VectorStr_rend(self)
def clear(self):
return _config.VectorStr_clear(self)
def get_allocator(self):
return _config.VectorStr_get_allocator(self)
def pop_back(self):
return _config.VectorStr_pop_back(self)
def erase(self, *args):
return _config.VectorStr_erase(self, *args)
def __init__(self, *args):
_config.VectorStr_swiginit(self, _config.new_VectorStr(*args))
def push_back(self, x):
return _config.VectorStr_push_back(self, x)
def front(self):
return _config.VectorStr_front(self)
def back(self):
return _config.VectorStr_back(self)
def assign(self, n, x):
return _config.VectorStr_assign(self, n, x)
def resize(self, *args):
return _config.VectorStr_resize(self, *args)
def insert(self, *args):
return _config.VectorStr_insert(self, *args)
def reserve(self, n):
return _config.VectorStr_reserve(self, n)
def capacity(self):
return _config.VectorStr_capacity(self)
__swig_destroy__ = _config.delete_VectorStr
# Register VectorStr in _config:
_config.VectorStr_swigregister(VectorStr)
class VectorInt(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def iterator(self):
return _config.VectorInt_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
return _config.VectorInt___nonzero__(self)
def __bool__(self):
return _config.VectorInt___bool__(self)
def __len__(self):
return _config.VectorInt___len__(self)
def __getslice__(self, i, j):
return _config.VectorInt___getslice__(self, i, j)
def __setslice__(self, *args):
return _config.VectorInt___setslice__(self, *args)
def __delslice__(self, i, j):
return _config.VectorInt___delslice__(self, i, j)
def __delitem__(self, *args):
return _config.VectorInt___delitem__(self, *args)
def __getitem__(self, *args):
return _config.VectorInt___getitem__(self, *args)
def __setitem__(self, *args):
return _config.VectorInt___setitem__(self, *args)
def pop(self):
return _config.VectorInt_pop(self)
def append(self, x):
return _config.VectorInt_append(self, x)
def empty(self):
return _config.VectorInt_empty(self)
def size(self):
return _config.VectorInt_size(self)
def swap(self, v):
return _config.VectorInt_swap(self, v)
def begin(self):
return _config.VectorInt_begin(self)
def end(self):
return _config.VectorInt_end(self)
def rbegin(self):
return _config.VectorInt_rbegin(self)
def rend(self):
return _config.VectorInt_rend(self)
def clear(self):
return _config.VectorInt_clear(self)
def get_allocator(self):
return _config.VectorInt_get_allocator(self)
def pop_back(self):
return _config.VectorInt_pop_back(self)
def erase(self, *args):
return _config.VectorInt_erase(self, *args)
def __init__(self, *args):
_config.VectorInt_swiginit(self, _config.new_VectorInt(*args))
def push_back(self, x):
return _config.VectorInt_push_back(self, x)
def front(self):
return _config.VectorInt_front(self)
def back(self):
return _config.VectorInt_back(self)
def assign(self, n, x):
return _config.VectorInt_assign(self, n, x)
def resize(self, *args):
return _config.VectorInt_resize(self, *args)
def insert(self, *args):
return _config.VectorInt_insert(self, *args)
def reserve(self, n):
return _config.VectorInt_reserve(self, n)
def capacity(self):
return _config.VectorInt_capacity(self)
__swig_destroy__ = _config.delete_VectorInt
# Register VectorInt in _config:
_config.VectorInt_swigregister(VectorInt)
class VectorDouble(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def iterator(self):
return _config.VectorDouble_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
return _config.VectorDouble___nonzero__(self)
def __bool__(self):
return _config.VectorDouble___bool__(self)
def __len__(self):
return _config.VectorDouble___len__(self)
def __getslice__(self, i, j):
return _config.VectorDouble___getslice__(self, i, j)
def __setslice__(self, *args):
return _config.VectorDouble___setslice__(self, *args)
def __delslice__(self, i, j):
return _config.VectorDouble___delslice__(self, i, j)
def __delitem__(self, *args):
return _config.VectorDouble___delitem__(self, *args)
def __getitem__(self, *args):
return _config.VectorDouble___getitem__(self, *args)
def __setitem__(self, *args):
return _config.VectorDouble___setitem__(self, *args)
def pop(self):
return _config.VectorDouble_pop(self)
def append(self, x):
return _config.VectorDouble_append(self, x)
def empty(self):
return _config.VectorDouble_empty(self)
def size(self):
return _config.VectorDouble_size(self)
def swap(self, v):
return _config.VectorDouble_swap(self, v)
def begin(self):
return _config.VectorDouble_begin(self)
def end(self):
return _config.VectorDouble_end(self)
def rbegin(self):
return _config.VectorDouble_rbegin(self)
def rend(self):
return _config.VectorDouble_rend(self)
def clear(self):
return _config.VectorDouble_clear(self)
def get_allocator(self):
return _config.VectorDouble_get_allocator(self)
def pop_back(self):
return _config.VectorDouble_pop_back(self)
def erase(self, *args):
return _config.VectorDouble_erase(self, *args)
def __init__(self, *args):
_config.VectorDouble_swiginit(self, _config.new_VectorDouble(*args))
def push_back(self, x):
return _config.VectorDouble_push_back(self, x)
def front(self):
return _config.VectorDouble_front(self)
def back(self):
return _config.VectorDouble_back(self)
def assign(self, n, x):
return _config.VectorDouble_assign(self, n, x)
def resize(self, *args):
return _config.VectorDouble_resize(self, *args)
def insert(self, *args):
return _config.VectorDouble_insert(self, *args)
def reserve(self, n):
return _config.VectorDouble_reserve(self, n)
def capacity(self):
return _config.VectorDouble_capacity(self)
__swig_destroy__ = _config.delete_VectorDouble
# Register VectorDouble in _config:
_config.VectorDouble_swigregister(VectorDouble)
class VectorBool(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def iterator(self):
return _config.VectorBool_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
return _config.VectorBool___nonzero__(self)
def __bool__(self):
return _config.VectorBool___bool__(self)
def __len__(self):
return _config.VectorBool___len__(self)
def __getslice__(self, i, j):
return _config.VectorBool___getslice__(self, i, j)
def __setslice__(self, *args):
return _config.VectorBool___setslice__(self, *args)
def __delslice__(self, i, j):
return _config.VectorBool___delslice__(self, i, j)
def __delitem__(self, *args):
return _config.VectorBool___delitem__(self, *args)
def __getitem__(self, *args):
return _config.VectorBool___getitem__(self, *args)
def __setitem__(self, *args):
return _config.VectorBool___setitem__(self, *args)
def pop(self):
return _config.VectorBool_pop(self)
def append(self, x):
return _config.VectorBool_append(self, x)
def empty(self):
return _config.VectorBool_empty(self)
def size(self):
return _config.VectorBool_size(self)
def swap(self, v):
return _config.VectorBool_swap(self, v)
def begin(self):
return _config.VectorBool_begin(self)
def end(self):
return _config.VectorBool_end(self)
def rbegin(self):
return _config.VectorBool_rbegin(self)
def rend(self):
return _config.VectorBool_rend(self)
def clear(self):
return _config.VectorBool_clear(self)
def get_allocator(self):
return _config.VectorBool_get_allocator(self)
def pop_back(self):
return _config.VectorBool_pop_back(self)
def erase(self, *args):
return _config.VectorBool_erase(self, *args)
def __init__(self, *args):
_config.VectorBool_swiginit(self, _config.new_VectorBool(*args))
def push_back(self, x):
return _config.VectorBool_push_back(self, x)
def front(self):
return _config.VectorBool_front(self)
def back(self):
return _config.VectorBool_back(self)
def assign(self, n, x):
return _config.VectorBool_assign(self, n, x)
def resize(self, *args):
return _config.VectorBool_resize(self, *args)
def insert(self, *args):
return _config.VectorBool_insert(self, *args)
def reserve(self, n):
return _config.VectorBool_reserve(self, n)
def capacity(self):
return _config.VectorBool_capacity(self)
__swig_destroy__ = _config.delete_VectorBool
# Register VectorBool in _config:
_config.VectorBool_swigregister(VectorBool)

2769
lib/python/seiscomp/core.py Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

Binary file not shown.

View File

@ -0,0 +1,85 @@
################################################################################
# Copyright (C) 2013-2014 by gempa GmbH
#
# HTTP -- Utility methods which generate HTTP result strings
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################
import base64
import datetime
import hashlib
import json
import time
import dateutil.parser
import dateutil.tz
from twisted.web import http
import gnupg
import seiscomp.logging
from .utils import accessLog, u_str
from .http import BaseResource
################################################################################
class AuthResource(BaseResource):
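"""Resource which exchanges a PGP-signed token POSTed by the client for
temporary credentials registered in the user database."""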
isLeaf = True
def __init__(self, version, gnupghome, userdb):
super().__init__(version)
self.__gpg = gnupg.GPG(gnupghome=gnupghome)
self.__userdb = userdb
# ---------------------------------------------------------------------------
def render_POST(self, request):
request.setHeader("Content-Type", "text/plain; charset=utf-8")
try:
verified = self.__gpg.decrypt(request.content.getvalue())
except OSError as e:
msg = "gpg decrypt error"
seiscomp.logging.warning(f"{msg}: {e}")
return self.renderErrorPage(request, http.INTERNAL_SERVER_ERROR, msg)
except Exception as e:
msg = "invalid token"
seiscomp.logging.warning(f"{msg}: {e}")
return self.renderErrorPage(request, http.BAD_REQUEST, msg)
if verified.trust_level is None or verified.trust_level < verified.TRUST_FULLY:
msg = "token has invalid signature"
seiscomp.logging.warning(msg)
return self.renderErrorPage(request, http.BAD_REQUEST, msg)
try:
attributes = json.loads(u_str(verified.data))
td = dateutil.parser.parse(
attributes["valid_until"]
) - datetime.datetime.now(dateutil.tz.tzutc())
lifetime = td.seconds + td.days * 24 * 3600
except Exception as e:
msg = "token has invalid validity"
seiscomp.logging.warning(f"{msg}: {e}")
return self.renderErrorPage(request, http.BAD_REQUEST, msg)
if lifetime <= 0:
msg = "token is expired"
seiscomp.logging.warning(msg)
return self.renderErrorPage(request, http.BAD_REQUEST, msg)
userid = base64.urlsafe_b64encode(hashlib.sha256(verified.data).digest()[:18])
password = self.__userdb.addUser(
u_str(userid),
attributes,
time.time() + min(lifetime, 24 * 3600),
u_str(verified.data),
)
accessLog(request, None, http.OK, len(userid) + len(password) + 1, None)
return userid + b":" + password

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,796 @@
################################################################################
# Copyright (C) 2013-2014 by gempa GmbH
#
# FDSNDataSelect -- Implements the fdsnws-dataselect Web service, see
# http://www.fdsn.org/webservices/
#
# Feature notes:
# - 'quality' request parameter not implemented (information not available in
# SeisComP)
# - 'minimumlength' parameter is not implemented
# - 'longestonly' parameter is not implemented
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################
import time
from io import BytesIO
import dateutil.parser
from twisted.cred import portal
from twisted.web import http, resource, server
from twisted.internet import interfaces, reactor
from zope.interface import implementer
from seiscomp import logging, mseedlite
from seiscomp.client import Application
from seiscomp.core import Array, Record, Time
from seiscomp.io import RecordInput, RecordStream
from .http import HTTP, BaseResource
from .request import RequestOptions
from . import utils
from .reqtrack import RequestTrackerDB
from .fastsds import SDS
VERSION = "1.1.3"
################################################################################
class _DataSelectRequestOptions(RequestOptions):
MinTime = Time(0, 1)
PQuality = ["quality"]
PMinimumLength = ["minimumlength"]
PLongestOnly = ["longestonly"]
QualityValues = ["B", "D", "M", "Q", "R"]
OutputFormats = ["miniseed", "mseed"]
POSTParams = RequestOptions.POSTParams + PQuality + PMinimumLength + PLongestOnly
GETParams = RequestOptions.GETParams + POSTParams
# ---------------------------------------------------------------------------
def __init__(self):
super().__init__()
self.service = "fdsnws-dataselect"
self.quality = self.QualityValues[0]
self.minimumLength = None
self.longestOnly = None
# ---------------------------------------------------------------------------
def _checkTimes(self, realtimeGap):
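"""Clamp all requested time windows to the range from 1970 up to
now minus realtimeGap and drop windows which become empty."""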
maxEndTime = Time(self.accessTime)
if realtimeGap is not None:
maxEndTime -= Time(realtimeGap, 0)
for ro in self.streams:
# create time if none was specified
if ro.time is None:
ro.time = RequestOptions.Time()
# restrict time to 1970 - now
if ro.time.start is None or ro.time.start < self.MinTime:
ro.time.start = self.MinTime
if ro.time.end is None or ro.time.end > maxEndTime:
ro.time.end = maxEndTime
# remove items with start time >= end time
self.streams = [x for x in self.streams if x.time.start < x.time.end]
# ---------------------------------------------------------------------------
def parse(self):
# quality (optional), currently not supported
key, value = self.getFirstValue(self.PQuality)
if value is not None:
value = value.upper()
if value in self.QualityValues:
self.quality = value
else:
self.raiseValueError(key)
# minimumlength (optional), currently not supported
self.minimumLength = self.parseFloat(self.PMinimumLength, 0)
# longestonly (optional), currently not supported
self.longestOnly = self.parseBool(self.PLongestOnly)
# generic parameters
self.parseTime()
self.parseChannel()
self.parseOutput()
################################################################################
class _MyRecordStream:
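"""Collects the requested time windows and yields the matching miniSEED
data, read either from a fastsds:// archive or from a generic SeisComP
record stream, reporting the result to the attached trackers."""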
def __init__(self, url, trackerList, bufferSize):
self.__url = url
self.__trackerList = trackerList
self.__bufferSize = bufferSize
self.__tw = []
def addStream(self, net, sta, loc, cha, startt, endt, restricted, archNet):
self.__tw.append((net, sta, loc, cha, startt, endt, restricted, archNet))
@staticmethod
def __override_network(data, net):
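"""Rewrite the network code of every miniSEED record in data and return
the re-encoded records."""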
inp = BytesIO(data)
out = BytesIO()
for rec in mseedlite.Input(inp):
rec.net = net
rec_len_exp = 9
while (1 << rec_len_exp) < rec.size:
rec_len_exp += 1
rec.write(out, rec_len_exp)
return out.getvalue()
def input(self):
fastsdsPrefix = "fastsds://"
if self.__url.startswith(fastsdsPrefix):
fastsds = SDS(self.__url[len(fastsdsPrefix) :])
else:
fastsds = None
for net, sta, loc, cha, startt, endt, restricted, archNet in self.__tw:
if not archNet:
archNet = net
size = 0
if fastsds:
start = dateutil.parser.parse(startt.iso()).replace(tzinfo=None)
end = dateutil.parser.parse(endt.iso()).replace(tzinfo=None)
for data in fastsds.getRawBytes(
start, end, archNet, sta, loc, cha, self.__bufferSize
):
size += len(data)
if archNet == net:
yield data
else:
try:
yield self.__override_network(data, net)
except Exception as e:
logging.error(f"could not override network code: {e}")
else:
rs = RecordStream.Open(self.__url)
if rs is None:
logging.error("could not open record stream")
break
rs.addStream(archNet, sta, loc, cha, startt, endt)
rsInput = RecordInput(rs, Array.INT, Record.SAVE_RAW)
eof = False
while not eof:
data = b""
while len(data) < self.__bufferSize:
try:
rec = rsInput.next()
except Exception as e:
logging.error(str(e))
eof = True
break
if rec is None:
eof = True
break
data += rec.raw().str()
if data:
size += len(data)
if archNet == net:
yield data
else:
try:
yield self.__override_network(data, net)
except Exception as e:
logging.error(f"could not override network code: {e}")
for tracker in self.__trackerList:
net_class = "t" if net[0] in "0123456789XYZ" else "p"
if size == 0:
tracker.line_status(
startt,
endt,
net,
sta,
cha,
loc,
restricted,
net_class,
True,
[],
"fdsnws",
"NODATA",
0,
"",
)
else:
tracker.line_status(
startt,
endt,
net,
sta,
cha,
loc,
restricted,
net_class,
True,
[],
"fdsnws",
"OK",
size,
"",
)
################################################################################
@implementer(interfaces.IPushProducer)
class _WaveformProducer:
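"""Push producer which streams the collected miniSEED data to the
client chunk by chunk, honoring Twisted's pause/resume/stop flow
control."""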
def __init__(self, req, ro, rs, fileName, trackerList):
self.req = req
self.ro = ro
self.it = rs.input()
self.fileName = fileName
self.written = 0
self.trackerList = trackerList
self.paused = False
self.stopped = False
self.running = False
def _flush(self, data):
if self.stopped:
return
if not self.paused:
reactor.callInThread(self._collectData)
else:
self.running = False
if self.written == 0:
self.req.setHeader("Content-Type", "application/vnd.fdsn.mseed")
self.req.setHeader(
"Content-Disposition", f"attachment; filename={self.fileName}"
)
self.req.write(data)
self.written += len(data)
def _finish(self):
if self.stopped:
return
if self.written == 0:
msg = "no waveform data found"
errorpage = HTTP.renderErrorPage(
self.req, http.NO_CONTENT, msg, VERSION, self.ro
)
if errorpage:
self.req.write(errorpage)
for tracker in self.trackerList:
tracker.volume_status("fdsnws", "NODATA", 0, "")
tracker.request_status("END", "")
else:
logging.debug(
f"{self.ro.service}: returned {self.written} bytes of mseed data"
)
utils.accessLog(self.req, self.ro, http.OK, self.written, None)
for tracker in self.trackerList:
tracker.volume_status("fdsnws", "OK", self.written, "")
tracker.request_status("END", "")
self.req.unregisterProducer()
self.req.finish()
def _collectData(self):
try:
reactor.callFromThread(self._flush, next(self.it))
except StopIteration:
reactor.callFromThread(self._finish)
def pauseProducing(self):
self.paused = True
def resumeProducing(self):
self.paused = False
if not self.running:
self.running = True
reactor.callInThread(self._collectData)
def stopProducing(self):
self.stopped = True
logging.debug(
f"{self.ro.service}: returned {self.written} bytes of mseed data (not "
"completed)"
)
utils.accessLog(self.req, self.ro, http.OK, self.written, "not completed")
for tracker in self.trackerList:
tracker.volume_status("fdsnws", "ERROR", self.written, "")
tracker.request_status("END", "")
self.req.unregisterProducer()
self.req.finish()
################################################################################
@implementer(portal.IRealm)
class FDSNDataSelectRealm:
# ---------------------------------------------------------------------------
def __init__(self, inv, bufferSize, access):
self.__inv = inv
self.__bufferSize = bufferSize
self.__access = access
# ---------------------------------------------------------------------------
def requestAvatar(self, avatarId, _mind, *interfaces_):
if resource.IResource in interfaces_:
return (
resource.IResource,
FDSNDataSelect(
self.__inv,
self.__bufferSize,
self.__access,
{"mail": utils.u_str(avatarId), "blacklisted": False},
),
lambda: None,
)
raise NotImplementedError()
################################################################################
@implementer(portal.IRealm)
class FDSNDataSelectAuthRealm:
# ---------------------------------------------------------------------------
def __init__(self, inv, bufferSize, access, userdb):
self.__inv = inv
self.__bufferSize = bufferSize
self.__access = access
self.__userdb = userdb
# ---------------------------------------------------------------------------
def requestAvatar(self, avatarId, _mind, *interfaces_):
if resource.IResource in interfaces_:
return (
resource.IResource,
FDSNDataSelect(
self.__inv,
self.__bufferSize,
self.__access,
self.__userdb.getAttributes(utils.u_str(avatarId)),
),
lambda: None,
)
raise NotImplementedError()
################################################################################
class FDSNDataSelect(BaseResource):
isLeaf = True
# ---------------------------------------------------------------------------
def __init__(self, inv, bufferSize, access=None, user=None):
super().__init__(VERSION)
self._rsURL = Application.Instance().recordStreamURL()
self.__inv = inv
self.__access = access
self.__user = user
self.__bufferSize = bufferSize
# ---------------------------------------------------------------------------
def render_OPTIONS(self, req):
req.setHeader("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
req.setHeader(
"Access-Control-Allow-Headers",
"Accept, Content-Type, X-Requested-With, Origin",
)
req.setHeader("Content-Type", "text/plain; charset=utf-8")
return ""
# ---------------------------------------------------------------------------
def render_GET(self, req):
# Parse and validate GET parameters
ro = _DataSelectRequestOptions()
ro.userName = self.__user and self.__user.get("mail")
try:
ro.parseGET(req.args)
ro.parse()
# the GET operation supports exactly one stream filter
ro.streams.append(ro)
except ValueError as e:
logging.warning(str(e))
return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
return self._processRequest(req, ro)
# ---------------------------------------------------------------------------
def render_POST(self, req):
# Parse and validate POST parameters
ro = _DataSelectRequestOptions()
ro.userName = self.__user and self.__user.get("mail")
try:
ro.parsePOST(req.content)
ro.parse()
except ValueError as e:
logging.warning(str(e))
return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
return self._processRequest(req, ro)
# -----------------------------------------------------------------------
def _networkIter(self, ro):
for i in range(self.__inv.networkCount()):
net = self.__inv.network(i)
# network code
if ro.channel and not ro.channel.matchNet(net.code()):
continue
# start and end time
if ro.time:
try:
end = net.end()
except ValueError:
end = None
if not ro.time.match(net.start(), end):
continue
yield net
# ---------------------------------------------------------------------------
@staticmethod
def _stationIter(net, ro):
for i in range(net.stationCount()):
sta = net.station(i)
# station code
if ro.channel and not ro.channel.matchSta(sta.code()):
continue
# start and end time
if ro.time:
try:
end = sta.end()
except ValueError:
end = None
if not ro.time.match(sta.start(), end):
continue
yield sta
# ---------------------------------------------------------------------------
@staticmethod
def _locationIter(sta, ro):
for i in range(sta.sensorLocationCount()):
loc = sta.sensorLocation(i)
# location code
if ro.channel and not ro.channel.matchLoc(loc.code()):
continue
# start and end time
if ro.time:
try:
end = loc.end()
except ValueError:
end = None
if not ro.time.match(loc.start(), end):
continue
yield loc
# ---------------------------------------------------------------------------
@staticmethod
def _streamIter(loc, ro):
for i in range(loc.streamCount()):
stream = loc.stream(i)
# stream code
if ro.channel and not ro.channel.matchCha(stream.code()):
continue
# start and end time
if ro.time:
try:
end = stream.end()
except ValueError:
end = None
if not ro.time.match(stream.start(), end):
continue
yield stream, False
for i in range(loc.auxStreamCount()):
stream = loc.auxStream(i)
# stream code
if ro.channel and not ro.channel.matchCha(stream.code()):
continue
# start and end time
if ro.time:
try:
end = stream.end()
except ValueError:
end = None
if not ro.time.match(stream.start(), end):
continue
yield stream, True
# ---------------------------------------------------------------------------
def _processRequest(self, req, ro):
# pylint: disable=W0212
if ro.quality not in ("B", "M"):
msg = "quality other than 'B' or 'M' not supported"
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
if ro.minimumLength:
msg = "enforcing of minimum record length not supported"
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
if ro.longestOnly:
msg = "limitation to longest segment not supported"
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
app = Application.Instance()
ro._checkTimes(app._realtimeGap)
maxSamples = None
if app._samplesM is not None:
maxSamples = app._samplesM * 1000000
samples = 0
trackerList = []
userIP = ""
if app._trackdbEnabled or app._requestLog:
xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
if xff:
userIP = xff[0].split(",")[0].strip()
else:
userIP = req.getClientIP()
clientID = req.getHeader("User-Agent")
if clientID:
clientID = clientID[:80]
else:
clientID = "fdsnws"
if app._trackdbEnabled:
if ro.userName:
userID = ro.userName
else:
userID = app._trackdbDefaultUser
reqID = f"ws{str(int(round(time.time() * 1000) - 1420070400000))}"
tracker = RequestTrackerDB(
clientID,
app.connection(),
reqID,
"WAVEFORM",
userID,
f"REQUEST WAVEFORM {reqID}",
"fdsnws",
userIP,
req.getClientIP(),
)
trackerList.append(tracker)
if app._requestLog:
tracker = app._requestLog.tracker(ro.service, ro.userName, userIP, clientID)
trackerList.append(tracker)
# Open record stream
rs = _MyRecordStream(self._rsURL, trackerList, self.__bufferSize)
forbidden = None
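# forbidden tracks the access state across all requested streams:
# None = no stream matched yet, True = only denied streams so far,
# False = at least one stream was allowed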
auxStreamsFound = False
# Add request streams
# iterate over inventory networks
for s in ro.streams:
for net in self._networkIter(s):
netRestricted = utils.isRestricted(net)
if not trackerList and netRestricted and not self.__user:
forbidden = forbidden or (forbidden is None)
continue
for sta in self._stationIter(net, s):
staRestricted = utils.isRestricted(sta)
if not trackerList and staRestricted and not self.__user:
forbidden = forbidden or (forbidden is None)
continue
for loc in self._locationIter(sta, s):
for cha, aux in self._streamIter(loc, s):
start_time = max(cha.start(), s.time.start)
try:
end_time = min(cha.end(), s.time.end)
except ValueError:
end_time = s.time.end
streamRestricted = (
netRestricted
or staRestricted
or utils.isRestricted(cha)
)
if streamRestricted and (
not self.__user
or (
self.__access
and not self.__access.authorize(
self.__user,
net.code(),
sta.code(),
loc.code(),
cha.code(),
start_time,
end_time,
)
)
):
for tracker in trackerList:
net_class = (
"t" if net.code()[0] in "0123456789XYZ" else "p"
)
tracker.line_status(
start_time,
end_time,
net.code(),
sta.code(),
cha.code(),
loc.code(),
True,
net_class,
True,
[],
"fdsnws",
"DENIED",
0,
"",
)
forbidden = forbidden or (forbidden is None)
continue
forbidden = False
# Aux streams are deprecated. Mark aux streams as
# present to report a warning later on. Also, do not
# count aux stream samples due to their loose
# binding to an aux device and source, which only
# optionally contains a sampling rate.
if aux:
auxStreamsFound = True
# enforce maximum samples per request restriction
elif maxSamples is not None:
try:
n = cha.sampleRateNumerator()
d = cha.sampleRateDenominator()
except ValueError:
logging.warning(
"skipping stream without sampling rate "
f"definition: {net.code()}.{sta.code()}."
f"{loc.code()}.{cha.code()}"
)
continue
# calculate number of samples for requested
# time window
diffSec = (end_time - start_time).length()
samples += int(diffSec * n / d)
if samples > maxSamples:
msg = (
f"maximum number of {app._samplesM}M samples "
"exceeded"
)
return self.renderErrorPage(
req, http.REQUEST_ENTITY_TOO_LARGE, msg, ro
)
logging.debug(
f"adding stream: {net.code()}.{sta.code()}.{loc.code()}"
f".{cha.code()} {start_time.iso()} - {end_time.iso()}"
)
rs.addStream(
net.code(),
sta.code(),
loc.code(),
cha.code(),
start_time,
end_time,
utils.isRestricted(cha),
sta.archiveNetworkCode(),
)
if forbidden:
for tracker in trackerList:
tracker.volume_status("fdsnws", "DENIED", 0, "")
tracker.request_status("END", "")
msg = "access denied"
return self.renderErrorPage(req, http.FORBIDDEN, msg, ro)
if forbidden is None:
for tracker in trackerList:
tracker.volume_status("fdsnws", "NODATA", 0, "")
tracker.request_status("END", "")
msg = "no metadata found"
return self.renderErrorPage(req, http.NO_CONTENT, msg, ro)
if auxStreamsFound:
msg = (
"the request contains at least one auxiliary stream which are "
"deprecated"
)
if maxSamples is not None:
msg += (
" and whose samples are not included in the maximum sample per "
"request limit"
)
logging.info(msg)
# Build output filename
fileName = (
Application.Instance()._fileNamePrefix.replace(
"%time", time.strftime("%Y-%m-%dT%H:%M:%S")
)
+ ".mseed"
)
# Create producer for async IO
prod = _WaveformProducer(req, ro, rs, fileName, trackerList)
req.registerProducer(prod, True)
prod.resumeProducing()
# The request is handled by the deferred object
return server.NOT_DONE_YET
# vim: ts=4 et
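For reference, a minimal client-side sketch of querying the dataselect endpoint implemented above. The host, port, and stream values are placeholders, not taken from this code base:

from urllib.request import urlopen

# Hypothetical host and stream; the query parameters (net, sta, loc, cha,
# starttime, endtime) are the ones parsed by _DataSelectRequestOptions.
url = (
    "http://localhost:8080/fdsnws/dataselect/1/query"
    "?net=GE&sta=APE&loc=--&cha=BHZ"
    "&starttime=2023-01-01T00:00:00&endtime=2023-01-01T01:00:00"
)
with urlopen(url) as resp, open("data.mseed", "wb") as f:
    f.write(resp.read())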

File diff suppressed because it is too large

View File

@ -0,0 +1,216 @@
################################################################################
# Copyright (C) 2014-2017 by GFZ Potsdam
#
# Classes to access an SDS structure to be used by the Dataselect-WS
#
# Author: Javier Quinteros
# Email: javier@gfz-potsdam.de
################################################################################
import datetime
import os
import seiscomp.logging
import seiscomp.mseedlite
class SDS:
def __init__(self, sdsRoot):
if isinstance(sdsRoot, list):
self.sdsRoot = sdsRoot
else:
self.sdsRoot = [sdsRoot]
def __getMSName(self, reqDate, net, sta, loc, cha):
for root in self.sdsRoot:
yield (
f"{root}/{reqDate.year}/{net}/{sta}/{cha}.D/{net}.{sta}.{loc}.{cha}.D."
f"{reqDate.year}.{reqDate.strftime('%j')}"
)
@staticmethod
def __time2recno(msFile, reclen, timeStart, recStart, timeEnd, recEnd, searchTime):
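# Interpolation search over fixed-size miniSEED records: estimate the
# target record number proportionally from the time offset within the
# bracket [recStart, recEnd], re-read the record at the estimate and
# tighten the bracket until the estimate stops moving. This assumes
# records of equal length stored in ascending time order.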
if searchTime <= timeStart:
msFile.seek(recStart * reclen)
rec = seiscomp.mseedlite.Record(msFile)
return (recStart, rec.end_time)
if searchTime >= timeEnd:
msFile.seek(recEnd * reclen)
rec = seiscomp.mseedlite.Record(msFile)
return (recEnd, rec.end_time)
t1 = timeStart
r1 = recStart
t2 = timeEnd
r2 = recEnd
rn = int(
r1
+ (r2 - r1) * (searchTime - t1).total_seconds() / (t2 - t1).total_seconds()
)
rn = max(rn, recStart)
rn = min(rn, recEnd)
while True:
msFile.seek(rn * reclen)
rec = seiscomp.mseedlite.Record(msFile)
if rec.begin_time < searchTime:
r1 = rn
t1 = rec.begin_time
if t1 == t2:
break
rn = int(
r1
+ (r2 - r1)
* (searchTime - t1).total_seconds()
/ (t2 - t1).total_seconds()
)
rn = max(rn, recStart)
rn = min(rn, recEnd)
if rn == r1:
break
else:
r2 = rn
t2 = rec.begin_time
if t1 == t2:
break
rn = int(
r2
- (r2 - r1)
* (t2 - searchTime).total_seconds()
/ (t2 - t1).total_seconds()
)
rn = max(rn, recStart)
rn = min(rn, recEnd)
if rn == r2:
break
return rn, rec.end_time
def __getWaveform(self, startt, endt, msFile, bufferSize):
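# Yield the records overlapping [startt, endt) from an open miniSEED
# file: bracket the time window with __time2recno(), then emit buffered,
# record-aligned chunks, skipping leading records that end before startt.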
if startt >= endt:
return
rec = seiscomp.mseedlite.Record(msFile)
reclen = rec.size
recStart = 0
timeStart = rec.begin_time
if rec.begin_time >= endt:
return
msFile.seek(-reclen, 2)
rec = seiscomp.mseedlite.Record(msFile)
recEnd = msFile.tell() // reclen - 1
timeEnd = rec.begin_time
if rec.end_time <= startt:
return
if timeStart >= timeEnd:
seiscomp.logging.error(
f"{msFile.name}: overlap detected (start={timeStart}, end={timeEnd})"
)
return
(lower, _) = self.__time2recno(
msFile, reclen, timeStart, recStart, timeEnd, recEnd, startt
)
(upper, _) = self.__time2recno(
msFile, reclen, startt, lower, timeEnd, recEnd, endt
)
if upper < lower:
seiscomp.logging.error(
f"{msFile.name}: overlap detected (lower={lower}, upper={upper})"
)
upper = lower
msFile.seek(lower * reclen)
remaining = (upper - lower + 1) * reclen
check = True
if bufferSize % reclen:
bufferSize += reclen - bufferSize % reclen
while remaining > 0:
size = min(remaining, bufferSize)
data = msFile.read(size)
remaining -= size
offset = 0
if not data:
return
if check:
while offset < len(data):
rec = seiscomp.mseedlite.Record(data[offset : offset + reclen])
if rec.begin_time >= endt:
return
if rec.end_time > startt:
break
offset += reclen
check = False
if offset < len(data):
yield data[offset:] if offset else data
while True:
data = msFile.read(reclen)
if not data:
return
rec = seiscomp.mseedlite.Record(data)
if rec.begin_time >= endt:
return
yield data
def __getDayRaw(self, day, startt, endt, net, sta, loc, cha, bufferSize):
# Take into account the case of empty location
if loc == "--":
loc = ""
for dataFile in self.__getMSName(day, net, sta, loc, cha):
if not os.path.exists(dataFile):
continue
try:
with open(dataFile, "rb") as msFile:
for buf in self.__getWaveform(startt, endt, msFile, bufferSize):
yield buf
except seiscomp.mseedlite.MSeedError as e:
seiscomp.logging.error(f"{dataFile}: {e}")
def getRawBytes(self, startt, endt, net, sta, loc, cha, bufferSize):
day = datetime.datetime(
startt.year, startt.month, startt.day
) - datetime.timedelta(days=1)
endDay = datetime.datetime(endt.year, endt.month, endt.day)
while day <= endDay:
for buf in self.__getDayRaw(
day, startt, endt, net, sta, loc, cha, bufferSize
):
yield buf
day += datetime.timedelta(days=1)
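A minimal usage sketch of the SDS reader above; the archive root and stream values are hypothetical:

import datetime

# Hypothetical SDS archive root and stream selection.
sds = SDS("/home/sysop/seiscomp/var/lib/archive")
start = datetime.datetime(2023, 1, 1, 0, 0, 0)
end = datetime.datetime(2023, 1, 1, 1, 0, 0)
with open("out.mseed", "wb") as f:
    for buf in sds.getRawBytes(start, end, "GE", "APE", "--", "BHZ", 65536):
        f.write(buf)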

View File

@ -0,0 +1,296 @@
################################################################################
# Copyright (C) 2013-2014 by gempa GmbH
#
# HTTP -- Utility methods which generate HTTP result strings
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################
from twisted.web import http, resource, server, static, util
import seiscomp.core
import seiscomp.logging
from .utils import accessLog, b_str, u_str, writeTSBin
VERSION = "1.2.5"
################################################################################
class HTTP:
# ---------------------------------------------------------------------------
@staticmethod
def renderErrorPage(request, code, msg, version=VERSION, ro=None):
resp = b"""\
Error %i: %s
%s
Usage details are available from %s
Request:
%s
Request Submitted:
%s
Service Version:
%s
"""
noContent = code == http.NO_CONTENT
# rewrite response code if requested and no data was found
if noContent and ro is not None:
code = ro.noData
# set response code
request.setResponseCode(code)
# status code 204 requires no message body
if code == http.NO_CONTENT:
response = b""
else:
request.setHeader("Content-Type", "text/plain; charset=utf-8")
reference = b"%s/" % request.path.rpartition(b"/")[0]
codeStr = http.RESPONSES[code]
date = b_str(seiscomp.core.Time.GMT().toString("%FT%T.%f"))
response = resp % (
code,
codeStr,
b_str(msg),
reference,
request.uri,
date,
b_str(version),
)
if not noContent:
seiscomp.logging.warning(
f"responding with error: {code} ({u_str(codeStr)})"
)
accessLog(request, ro, code, len(response), msg)
return response
# ---------------------------------------------------------------------------
@staticmethod
def renderNotFound(request, version=VERSION):
msg = "The requested resource does not exist on this server."
return HTTP.renderErrorPage(request, http.NOT_FOUND, msg, version)
# ---------------------------------------------------------------------------
@staticmethod
def renderNotModified(request, ro=None):
code = http.NOT_MODIFIED
request.setResponseCode(code)
request.responseHeaders.removeHeader("Content-Type")
accessLog(request, ro, code, 0, None)
################################################################################
class ServiceVersion(resource.Resource):
isLeaf = True
# ---------------------------------------------------------------------------
def __init__(self, version):
super().__init__()
self.version = version
self.type = "text/plain"
# ---------------------------------------------------------------------------
def render(self, request):
request.setHeader("Content-Type", "text/plain; charset=utf-8")
return b_str(self.version)
################################################################################
class WADLFilter(static.Data):
# ---------------------------------------------------------------------------
def __init__(self, path, paramNameFilterList):
data = ""
removeParam = False
with open(path, "r", encoding="utf-8") as fp:
for line in fp:
lineStripped = line.strip().replace(" ", "")
if removeParam:
if "</param>" in lineStripped:
removeParam = False
continue
valid = True
if "<param" in lineStripped:
for f in paramNameFilterList:
if f'name="{f}"' in lineStripped:
valid = False
if lineStripped[-2:] != "/>":
removeParam = True
break
if valid:
data += line
super().__init__(b_str(data), "application/xml; charset=utf-8")
################################################################################
class BaseResource(resource.Resource):
# ---------------------------------------------------------------------------
def __init__(self, version=VERSION):
super().__init__()
self.version = version
# ---------------------------------------------------------------------------
def renderErrorPage(self, request, code, msg, ro=None):
return HTTP.renderErrorPage(request, code, msg, self.version, ro)
# ---------------------------------------------------------------------------
def writeErrorPage(self, request, code, msg, ro=None):
data = self.renderErrorPage(request, code, msg, ro)
if data:
writeTSBin(request, data)
# ---------------------------------------------------------------------------
def returnNotModified(self, request, ro=None):
HTTP.renderNotModified(request, ro)
# ---------------------------------------------------------------------------
# Renders an error page if the result set exceeds the configured maximum
# number of objects
def checkObjects(self, request, objCount, maxObj):
if objCount <= maxObj:
return True
msg = (
"The result set of your request exceeds the configured maximum "
f"number of objects ({maxObj}). Refine your request parameters."
)
self.writeErrorPage(request, http.REQUEST_ENTITY_TOO_LARGE, msg)
return False
################################################################################
class NoResource(BaseResource):
isLeaf = True
# ---------------------------------------------------------------------------
def render(self, request):
return HTTP.renderNotFound(request, self.version)
# ---------------------------------------------------------------------------
def getChild(self, _path, _request):
return self
################################################################################
class ListingResource(BaseResource):
html = """<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="author" content="gempa GmbH">
<title>SeisComP FDSNWS Implementation</title>
</head>
<body>
<p><a href="../">Parent Directory</a></p>
<h1>SeisComP FDSNWS Web Service</h1>
<p>Index of %s</p>
<ul>
%s
</ul>
</body>"""
# ---------------------------------------------------------------------------
def render(self, request):
lis = ""
if request.path[-1:] != b"/":
return util.redirectTo(request.path + b"/", request)
for k, v in self.children.items():
if v.isLeaf:
continue
if hasattr(v, "hideInListing") and v.hideInListing:
continue
name = u_str(k)
lis += f'<li><a href="{name}/">{name}/</a></li>\n'
return b_str(ListingResource.html % (u_str(request.path), lis))
# ---------------------------------------------------------------------------
def getChild(self, path, _request):
if not path:
return self
return NoResource(self.version)
################################################################################
class DirectoryResource(static.File):
# ---------------------------------------------------------------------------
def __init__(self, fileName, version=VERSION):
super().__init__(fileName)
self.version = version
self.childNotFound = NoResource(self.version)
# ---------------------------------------------------------------------------
def render(self, request):
if request.path[-1:] != b"/":
return util.redirectTo(request.path + b"/", request)
return static.File.render(self, request)
# ---------------------------------------------------------------------------
def getChild(self, path, _request):
if not path:
return self
return NoResource(self.version)
################################################################################
class Site(server.Site):
def __init__(self, res, corsOrigins):
super().__init__(res)
self._corsOrigins = corsOrigins
# ---------------------------------------------------------------------------
def getResourceFor(self, request):
seiscomp.logging.debug(
f"request ({request.getClientIP()}): {u_str(request.uri)}"
)
request.setHeader("Server", f"SeisComP-FDSNWS/{VERSION}")
request.setHeader("Access-Control-Allow-Headers", "Authorization")
request.setHeader("Access-Control-Expose-Headers", "WWW-Authenticate")
self.setAllowOrigin(request)
return server.Site.getResourceFor(self, request)
# ---------------------------------------------------------------------------
def setAllowOrigin(self, req):
# no allowed origin: no response header
lenOrigins = len(self._corsOrigins)
if lenOrigins == 0:
return
# one origin: add header
if lenOrigins == 1:
req.setHeader("Access-Control-Allow-Origin", self._corsOrigins[0])
return
# more than one origin: check current origin against allowed origins
# and return the current origin on match.
origin = req.getHeader("Origin")
if origin in self._corsOrigins:
req.setHeader("Access-Control-Allow-Origin", origin)
# Set Vary header to let the browser know that the response depends
# on the request. Certain cache strategies should be disabled.
req.setHeader("Vary", "Origin")

View File

@ -0,0 +1,101 @@
################################################################################
# Copyright (C) 2013-2014 gempa GmbH
#
# Thread-safe file logger
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################
import os
import sys
import time
import threading
from queue import Queue
# -------------------------------------------------------------------------------
def _worker(log):
while True:
# pylint: disable=W0212
msg = log._queue.get()
log._write(str(msg))
log._queue.task_done()
################################################################################
class Log:
# ---------------------------------------------------------------------------
def __init__(self, filePath, archiveSize=7):
self._filePath = filePath
self._basePath = os.path.dirname(filePath)
self._fileName = os.path.basename(filePath)
self._archiveSize = archiveSize
self._queue = Queue()
self._lastLogTime = None
self._fd = None
self._archiveSize = max(self._archiveSize, 0)
# worker thread, responsible for writing messages to file
t = threading.Thread(target=_worker, args=(self,))
t.daemon = True
t.start()
# ---------------------------------------------------------------------------
def __del__(self):
# wait for worker thread to write all pending log messages
self._queue.join()
if self._fd is not None:
self._fd.close()
# ---------------------------------------------------------------------------
def log(self, msg):
self._queue.put(msg)
# ---------------------------------------------------------------------------
def _rotate(self):
self._fd.close()
self._fd = None
try:
pattern = f"{self._filePath}.%i"
for i in range(self._archiveSize, 1, -1):
src = pattern % (i - 1)
if os.path.isfile(src):
os.rename(src, pattern % i)
os.rename(self._filePath, pattern % 1)
except Exception as e:
print(f"failed to rotate access log: {e}", file=sys.stderr)
self._fd = open(self._filePath, "w", encoding="utf-8")
# ---------------------------------------------------------------------------
def _write(self, msg):
try:
now = time.localtime()
if self._fd is None:
if self._basePath and not os.path.exists(self._basePath):
os.makedirs(self._basePath)
self._fd = open(self._filePath, "a", encoding="utf-8")
elif (
self._archiveSize > 0
and self._lastLogTime is not None
and (
self._lastLogTime.tm_yday != now.tm_yday
or self._lastLogTime.tm_year != now.tm_year
)
):
self._rotate()
print(msg, file=self._fd)
self._fd.flush()
self._lastLogTime = now
except Exception as e:
print(f"access log: {e}", file=sys.stderr)
# vim: ts=4 et
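A minimal usage sketch of the Log class above; the file path and message are placeholders. log() only enqueues the message; the daemon worker thread performs the actual write and daily rotation:

accessLog = Log("/tmp/fdsnws/access.log", archiveSize=7)
accessLog.log("192.0.2.1 GET /fdsnws/dataselect/1/query 200")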

View File

@ -0,0 +1,138 @@
import os
import datetime
import json
import hashlib
import subprocess
import logging
import logging.handlers
import threading
from .utils import b_str
mutex = threading.Lock()
class MyFileHandler(logging.handlers.TimedRotatingFileHandler):
def __init__(self, filename):
super().__init__(filename, when="midnight", utc=True)
def rotate(self, source, dest):
super().rotate(source, dest)
if os.path.exists(dest):
subprocess.Popen(["bzip2", dest])
class Tracker:
def __init__(self, logger, geoip, service, userName, userIP, clientID, userSalt):
self.__logger = logger
self.__userName = userName
self.__userSalt = userSalt
self.__logged = False
if userName:
userID = int(
hashlib.md5(b_str(userSalt + userName.lower())).hexdigest()[:8], 16
)
else:
userID = int(hashlib.md5(b_str(userSalt + userIP)).hexdigest()[:8], 16)
self.__data = {
"service": service,
"userID": userID,
"clientID": clientID,
"userEmail": None,
"auth": bool(userName),
"userLocation": {},
"created": f"{datetime.datetime.utcnow().isoformat()}Z",
}
if geoip:
self.__data["userLocation"]["country"] = geoip.country_code_by_addr(userIP)
if (
userName and userName.lower().endswith("@gfz-potsdam.de")
) or userIP.startswith("139.17."):
self.__data["userLocation"]["institution"] = "GFZ"
# pylint: disable=W0613
def line_status(
self,
start_time,
end_time,
network,
station,
channel,
location,
restricted,
net_class,
shared,
constraints,
volume,
status,
size,
message,
):
try:
trace = self.__data["trace"]
except KeyError:
trace = []
self.__data["trace"] = trace
trace.append(
{
"net": network,
"sta": station,
"loc": location,
"cha": channel,
"start": start_time.iso(),
"end": end_time.iso(),
"restricted": restricted,
"status": status,
"bytes": size,
}
)
if restricted and status == "OK":
self.__data["userEmail"] = self.__userName
# FDSNWS requests have one volume, so volume_status() is called once per request
def volume_status(self, volume, status, size, message):
self.__data["status"] = status
self.__data["bytes"] = size
self.__data["finished"] = f"{datetime.datetime.utcnow().isoformat()}Z"
def request_status(self, status, message):
with mutex:
if not self.__logged:
self.__logger.info(json.dumps(self.__data))
self.__logged = True
class RequestLog:
def __init__(self, filename, userSalt):
self.__logger = logging.getLogger("seiscomp.fdsnws.reqlog")
self.__logger.addHandler(MyFileHandler(filename))
self.__logger.setLevel(logging.INFO)
self.__userSalt = userSalt
try:
import GeoIP
self.__geoip = GeoIP.new(GeoIP.GEOIP_MEMORY_CACHE)
except ImportError:
self.__geoip = None
def tracker(self, service, userName, userIP, clientID):
return Tracker(
self.__logger,
self.__geoip,
service,
userName,
userIP,
clientID,
self.__userSalt,
)
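A minimal round trip through the request logger above; the file name, salt, and client values are placeholders:

# One JSON line is emitted per request when request_status() is called.
reqLog = RequestLog("/tmp/fdsnws/req.log", "some-salt")
t = reqLog.tracker("fdsnws-dataselect", None, "192.0.2.1", "curl/8.0")
t.volume_status("fdsnws", "OK", 1024, "")
t.request_status("END", "")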

View File

@ -0,0 +1,179 @@
from twisted.internet import reactor
import seiscomp.core
import seiscomp.datamodel
def callFromThread(f):
def wrap(*args, **kwargs):
reactor.callFromThread(f, *args, **kwargs)
return wrap
def enableNotifier(f):
def wrap(*args, **kwargs):
saveState = seiscomp.datamodel.Notifier.IsEnabled()
seiscomp.datamodel.Notifier.SetEnabled(True)
f(*args, **kwargs)
seiscomp.datamodel.Notifier.SetEnabled(saveState)
return wrap
class RequestTrackerDB(object):
def __init__(
self,
appName,
msgConn,
req_id,
req_type,
user,
header,
label,
user_ip,
client_ip,
):
self.msgConn = msgConn
self.arclinkRequest = seiscomp.datamodel.ArclinkRequest.Create()
self.arclinkRequest.setCreated(seiscomp.core.Time.GMT())
self.arclinkRequest.setRequestID(req_id)
self.arclinkRequest.setUserID(str(user))
self.arclinkRequest.setClientID(appName)
if user_ip:
self.arclinkRequest.setUserIP(user_ip)
if client_ip:
self.arclinkRequest.setClientIP(client_ip)
self.arclinkRequest.setType(req_type)
self.arclinkRequest.setLabel(label)
self.arclinkRequest.setHeader(header)
self.averageTimeWindow = seiscomp.core.TimeSpan(0.0)
self.totalLineCount = 0
self.okLineCount = 0
self.requestLines = []
self.statusLines = []
def send(self):
msg = seiscomp.datamodel.Notifier.GetMessage(True)
if msg:
self.msgConn.send("LOGGING", msg)
def line_status(
self,
start_time,
end_time,
network,
station,
channel,
location,
restricted,
net_class,
shared,
constraints,
volume,
status,
size,
message,
):
if network is None or network == "":
network = "."
if station is None or station == "":
station = "."
if channel is None or channel == "":
channel = "."
if location is None or location == "":
location = "."
if volume is None:
volume = "NODATA"
if size is None:
size = 0
if message is None:
message = ""
if isinstance(constraints, list):
constr = " ".join(constraints)
else:
constr = " ".join([f"{a}={b}" for (a, b) in constraints.items()])
arclinkRequestLine = seiscomp.datamodel.ArclinkRequestLine()
arclinkRequestLine.setStart(start_time)
arclinkRequestLine.setEnd(end_time)
arclinkRequestLine.setStreamID(
seiscomp.datamodel.WaveformStreamID(
network[:8], station[:8], location[:8], channel[:8], ""
)
)
arclinkRequestLine.setConstraints(constr)
if isinstance(restricted, bool):
arclinkRequestLine.setRestricted(restricted)
arclinkRequestLine.setNetClass(net_class)
if isinstance(shared, bool):
arclinkRequestLine.setShared(shared)
#
arclinkStatusLine = seiscomp.datamodel.ArclinkStatusLine()
arclinkStatusLine.setVolumeID(volume)
arclinkStatusLine.setStatus(status)
arclinkStatusLine.setSize(size)
arclinkStatusLine.setMessage(message)
#
arclinkRequestLine.setStatus(arclinkStatusLine)
self.requestLines.append(arclinkRequestLine)
self.averageTimeWindow += end_time - start_time
self.totalLineCount += 1
if status == "OK":
self.okLineCount += 1
def volume_status(self, volume, status, size, message):
if volume is None:
volume = "NODATA"
if size is None:
size = 0
if message is None:
message = ""
arclinkStatusLine = seiscomp.datamodel.ArclinkStatusLine()
arclinkStatusLine.setVolumeID(volume)
arclinkStatusLine.setStatus(status)
arclinkStatusLine.setSize(size)
arclinkStatusLine.setMessage(message)
self.statusLines.append(arclinkStatusLine)
@callFromThread
@enableNotifier
def request_status(self, status, message):
if message is None:
message = ""
self.arclinkRequest.setStatus(status)
self.arclinkRequest.setMessage(message)
ars = seiscomp.datamodel.ArclinkRequestSummary()
tw = self.averageTimeWindow.seconds()
if self.totalLineCount > 0:
# average request time window
tw = self.averageTimeWindow.seconds() // self.totalLineCount
if tw >= 2**31:
tw = -1 # prevent 32bit int overflow
ars.setAverageTimeWindow(tw)
ars.setTotalLineCount(self.totalLineCount)
ars.setOkLineCount(self.okLineCount)
self.arclinkRequest.setSummary(ars)
al = seiscomp.datamodel.ArclinkLog()
al.add(self.arclinkRequest)
for obj in self.requestLines:
self.arclinkRequest.add(obj)
for obj in self.statusLines:
self.arclinkRequest.add(obj)
self.send()
def __verseed_errors(self, volume):
pass
def verseed(self, volume, file):
pass

View File

@ -0,0 +1,609 @@
################################################################################
# Copyright (C) 2013-2014 gempa GmbH
#
# RequestOptions -- HTTP GET request parameters
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################
import fnmatch
import math
import re
from twisted.web import http
from seiscomp.core import Time
import seiscomp.logging
import seiscomp.math
from .utils import u_str
class RequestOptions:
# re.match() anchors patterns only at the beginning of a string. Since we
# have to ensure that no invalid character is present anywhere in the
# value, we use the search() method with a negated pattern instead.
FloatChars = re.compile(r"[^-0-9.]").search
ChannelChars = re.compile(r"[^A-Za-z0-9*?]").search
ChannelExtChars = re.compile(r"[^A-Za-z0-9*?+\-_]").search
BooleanTrueValues = ["1", "true", "t", "yes", "y"]
BooleanFalseValues = ["0", "false", "f", "no", "n"]
OutputFormats = [] # override in derived classes
PStart = ["starttime", "start"]
PEnd = ["endtime", "end"]
PStartBefore = ["startbefore"]
PStartAfter = ["startafter"]
PEndBefore = ["endbefore"]
PEndAfter = ["endafter"]
SimpleTimeParams = PStart + PEnd
WindowTimeParams = PStartBefore + PStartAfter + PEndBefore + PEndAfter
TimeParams = SimpleTimeParams + WindowTimeParams
PNet = ["network", "net"]
PSta = ["station", "sta"]
PLoc = ["location", "loc"]
PCha = ["channel", "cha"]
StreamParams = PNet + PSta + PLoc + PCha
PMinLat = ["minlatitude", "minlat"]
PMaxLat = ["maxlatitude", "maxlat"]
PMinLon = ["minlongitude", "minlon"]
PMaxLon = ["maxlongitude", "maxlon"]
PLat = ["latitude", "lat"]
PLon = ["longitude", "lon"]
PMinRadius = ["minradius"]
PMaxRadius = ["maxradius"]
GeoRectParams = PMinLat + PMaxLat + PMinLon + PMaxLon
GeoCircleParams = PLat + PLon + PMinRadius + PMaxRadius
GeoParams = GeoRectParams + GeoCircleParams
PFormat = ["format"]
PNoData = ["nodata"]
OutputParams = PFormat + PNoData
POSTParams = OutputParams
GETParams = StreamParams + SimpleTimeParams
# ---------------------------------------------------------------------------
class Channel:
def __init__(self):
self.net = None
self.sta = None
self.loc = None
self.cha = None
def matchNet(self, value):
return self.match(value, self.net)
def matchSta(self, value):
return self.match(value, self.sta)
def matchLoc(self, value):
return self.match(value, self.loc, True)
def matchCha(self, value):
return self.match(value, self.cha)
@staticmethod
def match(value, globList, testEmpty=False):
if not globList:
return True
for glob in globList:
if testEmpty and value == "" and glob == "--":
return True
if fnmatch.fnmatchcase(value, glob):
return True
return False
# ---------------------------------------------------------------------------
class Time:
def __init__(self):
self.simpleTime = True
self.start = None
self.end = None
# window time only
self.startBefore = None
self.startAfter = None
self.endBefore = None
self.endAfter = None
# used by FDSN Station and DataSelect
def match(self, start, end=None):
# simple time: limit to epochs intersecting with the specified time
# range
res = (self.start is None or end is None or end >= self.start) and (
self.end is None or start <= self.end
)
# window time: limit to epochs strictly starting or ending before or
# after a specified time value
if not self.simpleTime:
res = (
res
and (
self.startBefore is None
or (start is not None and start < self.startBefore)
)
and (
self.startAfter is None
or (start is not None and start > self.startAfter)
)
and (
self.endBefore is None
or (end is not None and end < self.endBefore)
)
and (self.endAfter is None or end is None or end > self.endAfter)
)
return res
# ---------------------------------------------------------------------------
class Geo:
# -----------------------------------------------------------------------
class BBox:
def __init__(self):
self.minLat = None
self.maxLat = None
self.minLon = None
self.maxLon = None
def dateLineCrossing(self):
return self.minLon is not None and self.maxLon is not None and self.minLon > self.maxLon
# -----------------------------------------------------------------------
class BCircle:
def __init__(self):
self.lat = None
self.lon = None
self.minRad = None
self.maxRad = None
# -------------------------------------------------------------------
# Calculates outer bounding box
def calculateBBox(self):
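# For a circle of angular radius maxRad centred at (lat, lon), the
# latitude extremes are lat +/- maxRad. If no pole falls inside the
# circle, the longitude extremes are lon +/- dLon with
# dLon = asin(sin(maxRad) / cos(lat)), converted between degrees and
# radians as needed.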
def rad(degree):
return math.radians(degree)
def deg(radians):
return math.degrees(radians)
b = RequestOptions.Geo.BBox()
if self.maxRad is None or self.maxRad >= 180:
return b
b.minLat = self.lat - self.maxRad
b.maxLat = self.lat + self.maxRad
if b.minLat > -90 and b.maxLat < 90:
dLon = deg(
math.asin(math.sin(rad(self.maxRad)) / math.cos(rad(self.lat)))
)
b.minLon = self.lon - dLon
if b.minLon < -180:
b.minLon += 360
b.maxLon = self.lon + dLon
if b.maxLon > 180:
b.maxLon -= 360
else:
# pole within distance: only one latitude restriction remains,
# no longitude restriction
if b.minLat <= -90:
b.minLat = None
else:
b.maxLat = None
b.minLon = None
b.maxLon = None
return b
# -----------------------------------------------------------------------
def __init__(self):
self.bBox = None
self.bCircle = None
# -----------------------------------------------------------------------
def match(self, lat, lon):
if self.bBox is not None:
b = self.bBox
if b.minLat is not None and lat < b.minLat:
return False
if b.maxLat is not None and lat > b.maxLat:
return False
# date line crossing if minLon > maxLon
if b.dateLineCrossing():
return lon >= b.minLon or lon <= b.maxLon
if b.minLon is not None and lon < b.minLon:
return False
if b.maxLon is not None and lon > b.maxLon:
return False
return True
if self.bCircle:
c = self.bCircle
dist = seiscomp.math.delazi(c.lat, c.lon, lat, lon)
if c.minRad is not None and dist[0] < c.minRad:
return False
if c.maxRad is not None and dist[0] > c.maxRad:
return False
return True
return False
# ---------------------------------------------------------------------------
def __init__(self):
self.service = ""
self.accessTime = Time.GMT()
self.userName = None
self.time = None
self.channel = None
self.geo = None
self.noData = http.NO_CONTENT
self.format = None
self._args = {}
self.streams = []  # 1 entry for GET, multiple entries for POST
# ---------------------------------------------------------------------------
def parseOutput(self):
# nodata
code = self.parseInt(self.PNoData)
if code is not None:
if code not in (http.NO_CONTENT, http.NOT_FOUND):
self.raiseValueError(self.PNoData[0])
self.noData = code
# format
key, value = self.getFirstValue(self.PFormat)
if value is None:
# no format specified: default to first in list if available
if len(self.OutputFormats) > 0:
self.format = self.OutputFormats[0]
else:
value = value.lower()
if value in self.OutputFormats:
self.format = value
else:
self.raiseValueError(key)
# ---------------------------------------------------------------------------
def parseChannel(self):
c = RequestOptions.Channel()
c.net = self.parseChannelChars(self.PNet, False, True)
c.sta = self.parseChannelChars(self.PSta)
c.loc = self.parseChannelChars(self.PLoc, True)
c.cha = self.parseChannelChars(self.PCha)
if c.net or c.sta or c.loc or c.cha:
self.channel = c
# ---------------------------------------------------------------------------
def parseTime(self, parseWindowTime=False):
t = RequestOptions.Time()
# start[time], end[time]
t.start = self.parseTimeStr(self.PStart)
t.end = self.parseTimeStr(self.PEnd)
simpleTime = t.start is not None or t.end is not None
# [start,end][before,after]
if parseWindowTime:
t.startBefore = self.parseTimeStr(self.PStartBefore)
t.startAfter = self.parseTimeStr(self.PStartAfter)
t.endBefore = self.parseTimeStr(self.PEndBefore)
t.endAfter = self.parseTimeStr(self.PEndAfter)
windowTime = (
t.startBefore is not None
or t.startAfter is not None
or t.endBefore is not None
or t.endAfter is not None
)
if simpleTime or windowTime:
self.time = t
self.time.simpleTime = not windowTime
elif simpleTime:
self.time = t
self.time.simpleTime = True
# ---------------------------------------------------------------------------
def parseGeo(self):
# bounding box (optional)
b = RequestOptions.Geo.BBox()
b.minLat = self.parseFloat(self.PMinLat, -90, 90)
b.maxLat = self.parseFloat(self.PMaxLat, -90, 90)
if b.minLat is not None and b.maxLat is not None and b.minLat > b.maxLat:
raise ValueError(f"{self.PMinLat[0]} exceeds {self.PMaxLat[0]}")
b.minLon = self.parseFloat(self.PMinLon, -180, 180)
b.maxLon = self.parseFloat(self.PMaxLon, -180, 180)
# maxLon < minLon -> date line crossing
hasBBoxParam = (
b.minLat is not None
or b.maxLat is not None
or b.minLon is not None
or b.maxLon is not None
)
# bounding circle (optional)
c = RequestOptions.Geo.BCircle()
c.lat = self.parseFloat(self.PLat, -90, 90)
c.lon = self.parseFloat(self.PLon, -180, 180)
c.minRad = self.parseFloat(self.PMinRadius, 0, 180)
c.maxRad = self.parseFloat(self.PMaxRadius, 0, 180)
if c.minRad is not None and c.maxRad is not None and c.minRad > c.maxRad:
raise ValueError(f"{self.PMinRadius[0]} exceeds {self.PMaxRadius[0]}")
hasBCircleRadParam = c.minRad is not None or c.maxRad is not None
hasBCircleParam = c.lat is not None or c.lon is not None or hasBCircleRadParam
# bounding box and bounding circle may not be combined
if hasBBoxParam and hasBCircleParam:
raise ValueError(
"bounding box and bounding circle parameters may not be combined"
)
if hasBBoxParam:
self.geo = RequestOptions.Geo()
self.geo.bBox = b
elif hasBCircleRadParam:
self.geo = RequestOptions.Geo()
if c.lat is None:
c.lat = 0.0
if c.lon is None:
c.lon = 0.0
self.geo.bCircle = c
# ---------------------------------------------------------------------------
@staticmethod
def _assertValueRange(key, v, minValue, maxValue):
if (minValue is not None and v < minValue) or (
maxValue is not None and v > maxValue
):
minStr, maxStr = "-inf", "inf"
if minValue is not None:
minStr = str(minValue)
if maxValue is not None:
maxStr = str(maxValue)
raise ValueError(f"parameter not in domain [{minStr},{maxStr}]: {key}")
# ---------------------------------------------------------------------------
@staticmethod
def raiseValueError(key):
raise ValueError(f"invalid value in parameter: {key}")
# ---------------------------------------------------------------------------
def getFirstValue(self, keys):
for key in keys:
if key in self._args:
return key, self._args[key][0].strip()
return None, None
# ---------------------------------------------------------------------------
def getValues(self, keys):
v = []
for key in keys:
if key in self._args:
v += self._args[key]
return v
# ---------------------------------------------------------------------------
def getListValues(self, keys, lower=False):
values = set()
for key in keys:
if key not in self._args:
continue
for vList in self._args[key]:
for v in vList.split(","):
if v is None:
continue
v = v.strip()
if lower:
v = v.lower()
values.add(v)
return values
# ---------------------------------------------------------------------------
def parseInt(self, keys, minValue=None, maxValue=None):
key, value = self.getFirstValue(keys)
if value is None:
return None
try:
i = int(value)
except ValueError as e:
raise ValueError(f"invalid integer value in parameter: {key}") from e
self._assertValueRange(key, i, minValue, maxValue)
return i
# ---------------------------------------------------------------------------
def parseFloat(self, keys, minValue=None, maxValue=None):
key, value = self.getFirstValue(keys)
if value is None:
return None
if self.FloatChars(value):
raise ValueError(
f"invalid characters in float parameter: {key} (scientific notation "
"forbidden by spec)"
)
try:
f = float(value)
except ValueError as e:
raise ValueError(f"invalid float value in parameter: {key}") from e
self._assertValueRange(key, f, minValue, maxValue)
return f
# ---------------------------------------------------------------------------
def parseBool(self, keys):
key, value = self.getFirstValue(keys)
if value is None:
return None
value = value.lower()
if value in self.BooleanTrueValues:
return True
if value in self.BooleanFalseValues:
return False
raise ValueError(f"invalid boolean value in parameter: {key}")
# ---------------------------------------------------------------------------
def parseTimeStr(self, keys):
key, value = self.getFirstValue(keys)
if value is None:
return None
time = Time.FromString(value)
# use explicit test for None here since bool value for epoch date
# (1970-01-01) is False
if time is None:
raise ValueError(f"invalid date format in parameter: {key}")
return time
# ---------------------------------------------------------------------------
def parseChannelChars(self, keys, allowEmpty=False, useExtChars=False):
# channel parameters may be specified as a comma-separated list and may
# be repeated several times
values = None
for vList in self.getValues(keys):
if values is None:
values = []
for v in vList.split(","):
v = v.strip()
if allowEmpty and (v == "--" or len(v) == 0):
values.append("--")
continue
if (useExtChars and self.ChannelExtChars(v)) or (
not useExtChars and self.ChannelChars(v)
):
raise ValueError(f"invalid characters in parameter: {keys[0]}")
values.append(v)
return values
# ---------------------------------------------------------------------------
def parseGET(self, args):
# transform keys to lower case
if args is not None:
for k, v in args.items():
k = u_str(k.lower())
if k not in self.GETParams:
raise ValueError(f"invalid param: {k}")
self._args[k] = [u_str(x) for x in v]
# ---------------------------------------------------------------------------
def parsePOST(self, content):
nLine = 0
for line in content:
nLine += 1
line = u_str(line.strip())
# ignore empty and comment lines
if len(line) == 0 or line[0] == "#":
continue
# collect parameter (non stream lines)
toks = line.split("=", 1)
if len(toks) > 1:
key = toks[0].strip().lower()
isPOSTParam = False
for p in self.POSTParams:
if p == key:
if key not in self._args:
self._args[key] = []
self._args[key].append(toks[1].strip())
isPOSTParam = True
break
if isPOSTParam:
continue
# time parameters not allowed in POST header
for p in self.TimeParams:
if p == key:
raise ValueError(
f"time parameter in line {nLine} not allowed in POST "
"request"
)
# stream parameters not allowed in POST header
for p in self.StreamParams:
if p == key:
raise ValueError(
f"stream parameter in line {nLine} not allowed in POST "
"request"
)
raise ValueError(f"invalid parameter in line {nLine}")
# stream parameters
toks = line.split()
nToks = len(toks)
if nToks not in (5, 6):
raise ValueError("invalid number of stream components in line {nLine}")
ro = RequestOptions()
# net, sta, loc, cha
ro.channel = RequestOptions.Channel()
ro.channel.net = toks[0].split(",")
ro.channel.sta = toks[1].split(",")
ro.channel.loc = toks[2].split(",")
ro.channel.cha = toks[3].split(",")
msg = "invalid %s value in line %i"
for net in ro.channel.net:
if ro.ChannelChars(net):
raise ValueError(msg % ("network", nLine))
for sta in ro.channel.sta:
if ro.ChannelChars(sta):
raise ValueError(msg % ("station", nLine))
for loc in ro.channel.loc:
if loc != "--" and ro.ChannelChars(loc):
raise ValueError(msg % ("location", nLine))
for cha in ro.channel.cha:
if ro.ChannelChars(cha):
raise ValueError(msg % ("channel", nLine))
# start/end time
ro.time = RequestOptions.Time()
ro.time.start = Time.FromString(toks[4])
logEnd = "-"
if len(toks) > 5:
ro.time.end = Time.FromString(toks[5])
logEnd = ro.time.end.iso()
seiscomp.logging.debug(
f"ro: {ro.channel.net}.{ro.channel.sta}.{ro.channel.loc}."
f"{ro.channel.cha} {ro.time.start.iso()} {logEnd}"
)
self.streams.append(ro)
if not self.streams:
raise ValueError("at least one stream line is required")
# vim: ts=4 et
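For reference, a minimal POST body accepted by parsePOST() above: parameter lines first, then one "NET STA LOC CHA START [END]" line per stream. All values below are placeholders:

# Hypothetical POST payload; parsePOST() accepts any iterable of lines.
postBody = b"""\
nodata=404
GE APE -- BHZ 2023-01-01T00:00:00 2023-01-02T00:00:00
GE WLF -- BH? 2023-01-01T00:00:00 2023-01-01T06:00:00
"""
ro = RequestOptions()
ro.parsePOST(postBody.splitlines())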

View File

@ -0,0 +1,936 @@
################################################################################
# Copyright (C) 2013-2014 gempa GmbH
#
# FDSNStation -- Implements the fdsnws-station Web service, see
# http://www.fdsn.org/webservices/
#
# Feature notes:
# - 'updatedafter' request parameter not implemented: The last modification
# time in SeisComP is tracked on the object level. If a child of an object
# is updated, the update time is not propagated to all parents. In order to
# check if a station was updated, all children must be evaluated recursively.
# This operation would be much too expensive.
# - additional request parameters:
# - formatted: boolean, default: false
# - additional values of request parameters:
# - format
# - standard: [xml, text]
# - additional: [fdsnxml (=xml), stationxml, sc3ml]
# - default: xml
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################
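# Example query against this service (host and port are placeholders):
# http://localhost:8080/fdsnws/station/1/query?net=GE&level=channel&format=text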
from twisted.internet.threads import deferToThread
from twisted.web import http, server
import seiscomp.datamodel
import seiscomp.logging
from seiscomp.client import Application
from seiscomp.core import Time
from seiscomp.io import Exporter, ExportObjectList
from .http import BaseResource
from .request import RequestOptions
from . import utils
VERSION = "1.1.6"
################################################################################
class _StationRequestOptions(RequestOptions):
Exporters = {
"xml": "fdsnxml",
"fdsnxml": "fdsnxml",
"stationxml": "staxml",
"sc3ml": "trunk",
}
MinTime = Time(0, 1)
VText = ["text"]
# OutputFormats = list(Exporters) + VText
# The default format must be listed first; list(Exporters) does not
# guarantee a stable order.
OutputFormats = ["xml", "fdsnxml", "stationxml", "sc3ml"] + VText
PLevel = ["level"]
PIncludeRestricted = ["includerestricted"]
PIncludeAvailability = ["includeavailability"]
PUpdateAfter = ["updateafter"]
PMatchTimeSeries = ["matchtimeseries"]
# non standard parameters
PFormatted = ["formatted"]
POSTParams = (
RequestOptions.POSTParams
+ RequestOptions.GeoParams
+ PLevel
+ PIncludeRestricted
+ PIncludeAvailability
+ PUpdateAfter
+ PMatchTimeSeries
+ PFormatted
)
GETParams = RequestOptions.GETParams + RequestOptions.WindowTimeParams + POSTParams
# ---------------------------------------------------------------------------
def __init__(self):
super().__init__()
self.service = "fdsnws-station"
self.includeSta = True
self.includeCha = False
self.includeRes = False
self.restricted = None
self.availability = None
self.updatedAfter = None
self.matchTimeSeries = None
# non standard parameters
self.formatted = None
# ---------------------------------------------------------------------------
def parse(self):
self.parseTime(True)
self.parseChannel()
self.parseGeo()
self.parseOutput()
# level: [network, station, channel, response]
key, value = self.getFirstValue(self.PLevel)
if value is not None:
value = value.lower()
if value in ("network", "net"):
self.includeSta = False
elif value in ("channel", "cha", "chan"):
self.includeCha = True
elif value in ("response", "res", "resp"):
self.includeCha = True
self.includeRes = True
elif value not in ("station", "sta"):
self.raiseValueError(key)
# includeRestricted (optional)
self.restricted = self.parseBool(self.PIncludeRestricted)
# includeAvailability (optional)
self.availability = self.parseBool(self.PIncludeAvailability)
# updatedAfter (optional), currently not supported
self.updatedAfter = self.parseTimeStr(self.PUpdateAfter)
# matchTimeSeries (optional)
self.matchTimeSeries = self.parseBool(self.PMatchTimeSeries)
# formatted XML output (optional, non-standard)
self.formatted = self.parseBool(self.PFormatted)
# ---------------------------------------------------------------------------
def networkIter(self, inv, matchTime=False):
for i in range(inv.networkCount()):
net = inv.network(i)
for ro in self.streams:
# network code
if ro.channel and not ro.channel.matchNet(net.code()):
continue
# start and end time
if matchTime and ro.time:
try:
end = net.end()
except ValueError:
end = None
if not ro.time.match(net.start(), end):
continue
yield net
break
# ---------------------------------------------------------------------------
def stationIter(self, net, matchTime=False):
for i in range(net.stationCount()):
sta = net.station(i)
# geographic location
if self.geo:
try:
lat = sta.latitude()
lon = sta.longitude()
except ValueError:
continue
if not self.geo.match(lat, lon):
continue
for ro in self.streams:
# station code
if ro.channel and (
not ro.channel.matchSta(sta.code())
or not ro.channel.matchNet(net.code())
):
continue
# start and end time
if matchTime and ro.time:
try:
end = sta.end()
except ValueError:
end = None
if not ro.time.match(sta.start(), end):
continue
yield sta
break
# ---------------------------------------------------------------------------
def locationIter(self, net, sta, matchTime=False):
for i in range(sta.sensorLocationCount()):
loc = sta.sensorLocation(i)
for ro in self.streams:
# location code
if ro.channel and (
not ro.channel.matchLoc(loc.code())
or not ro.channel.matchSta(sta.code())
or not ro.channel.matchNet(net.code())
):
continue
# start and end time
if matchTime and ro.time:
try:
end = loc.end()
except ValueError:
end = None
if not ro.time.match(loc.start(), end):
continue
yield loc
break
# ---------------------------------------------------------------------------
def streamIter(self, net, sta, loc, matchTime, dac):
for i in range(loc.streamCount()):
stream = loc.stream(i)
for ro in self.streams:
# stream code
if ro.channel and (
not ro.channel.matchCha(stream.code())
or not ro.channel.matchLoc(loc.code())
or not ro.channel.matchSta(sta.code())
or not ro.channel.matchNet(net.code())
):
continue
# start and end time
if matchTime and ro.time:
try:
end = stream.end()
except ValueError:
end = None
if not ro.time.match(stream.start(), end):
continue
# match data availability extent
if dac is not None and self.matchTimeSeries:
extent = dac.extent(
net.code(), sta.code(), loc.code(), stream.code()
)
if extent is None or (
ro.time and not ro.time.match(extent.start(), extent.end())
):
continue
yield stream
break
################################################################################
class FDSNStation(BaseResource):
isLeaf = True
# ---------------------------------------------------------------------------
def __init__(
self,
inv,
restricted,
maxObj,
daEnabled,
conditionalRequestsEnabled,
timeInventoryLoaded,
):
super().__init__(VERSION)
self._inv = inv
self._allowRestricted = restricted
self._maxObj = maxObj
self._daEnabled = daEnabled
self._conditionalRequestsEnabled = conditionalRequestsEnabled
self._timeInventoryLoaded = timeInventoryLoaded.seconds()
# additional object count dependent on detail level
self._resLevelCount = (
inv.responsePAZCount()
+ inv.responseFIRCount()
+ inv.responsePolynomialCount()
+ inv.responseIIRCount()
+ inv.responseFAPCount()
)
for i in range(inv.dataloggerCount()):
self._resLevelCount += inv.datalogger(i).decimationCount()
# ---------------------------------------------------------------------------
def render_OPTIONS(self, req):
req.setHeader("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
req.setHeader(
"Access-Control-Allow-Headers",
"Accept, Content-Type, X-Requested-With, Origin",
)
req.setHeader("Content-Type", "text/plain; charset=utf-8")
return ""
# ---------------------------------------------------------------------------
def render_GET(self, req):
# Parse and validate GET parameters
ro = _StationRequestOptions()
try:
ro.parseGET(req.args)
ro.parse()
# the GET operation supports exactly one stream filter
ro.streams.append(ro)
except ValueError as e:
seiscomp.logging.warning(str(e))
return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
return self._prepareRequest(req, ro)
# ---------------------------------------------------------------------------
def render_POST(self, req):
# Parse and validate POST parameters
ro = _StationRequestOptions()
try:
ro.parsePOST(req.content)
ro.parse()
except ValueError as e:
seiscomp.logging.warning(str(e))
return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
return self._prepareRequest(req, ro)
# ---------------------------------------------------------------------------
def _prepareRequest(self, req, ro):
if ro.availability and not self._daEnabled:
msg = "including of availability information not supported"
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
if ro.updatedAfter:
msg = "filtering based on update time not supported"
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
if ro.matchTimeSeries and not self._daEnabled:
msg = "filtering based on available time series not supported"
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
# load data availability if requested
dac = None
if ro.availability or ro.matchTimeSeries:
dac = Application.Instance().getDACache()
if dac is None or len(dac.extents()) == 0:
msg = "no data availabiltiy extent information found"
return self.renderErrorPage(req, http.NO_CONTENT, msg, ro)
# Exporter, 'None' is used for text output
if ro.format in ro.VText:
if ro.includeRes:
msg = "response level output not available in text format"
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
req.setHeader("Content-Type", "text/plain; charset=utf-8")
d = deferToThread(self._processRequestText, req, ro, dac)
else:
exp = Exporter.Create(ro.Exporters[ro.format])
if exp is None:
msg = (
f"output format '{ro.format}' no available, export module "
f"'{ro.Exporters[ro.format]}' could not be loaded."
)
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
req.setHeader("Content-Type", "application/xml; charset=utf-8")
exp.setFormattedOutput(bool(ro.formatted))
d = deferToThread(self._processRequestExp, req, ro, exp, dac)
req.notifyFinish().addErrback(utils.onCancel, d)
d.addBoth(utils.onFinish, req)
# The request is handled by the deferred object
return server.NOT_DONE_YET
# ---------------------------------------------------------------------------
def _processRequestExp(self, req, ro, exp, dac):
if req._disconnected: # pylint: disable=W0212
return False
staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
newInv = seiscomp.datamodel.Inventory()
dataloggers, sensors, extents = set(), set(), {}
skipRestricted = not self._allowRestricted or (
ro.restricted is not None and not ro.restricted
)
levelNet = not ro.includeSta
levelSta = ro.includeSta and not ro.includeCha
isConditionalRequest = self._isConditionalRequest(req)
# iterate over inventory networks
for net in ro.networkIter(self._inv, levelNet):
if req._disconnected: # pylint: disable=W0212
return False
if skipRestricted and utils.isRestricted(net):
continue
newNet = seiscomp.datamodel.Network(net)
# Copy comments
for i in range(net.commentCount()):
newNet.add(seiscomp.datamodel.Comment(net.comment(i)))
# iterate over inventory stations of current network
for sta in ro.stationIter(net, levelSta):
if req._disconnected: # pylint: disable=W0212
return False
if skipRestricted and utils.isRestricted(sta):
continue
if not self.checkObjects(req, objCount, self._maxObj):
return False
if ro.includeCha:
numCha, numLoc, d, s, e = self._processStation(
newNet, net, sta, ro, dac, skipRestricted, isConditionalRequest
)
if numCha > 0:
if isConditionalRequest:
self.returnNotModified(req, ro)
return True
locCount += numLoc
chaCount += numCha
extCount += len(e)
objCount += numLoc + numCha + extCount
if not self.checkObjects(req, objCount, self._maxObj):
return False
dataloggers |= d
sensors |= s
for k, v in e.items():
if k not in extents:
extents[k] = v
elif self._matchStation(net, sta, ro, dac):
if isConditionalRequest:
self.returnNotModified(req, ro)
return True
if ro.includeSta:
newSta = seiscomp.datamodel.Station(sta)
# Copy comments
for i in range(sta.commentCount()):
newSta.add(seiscomp.datamodel.Comment(sta.comment(i)))
newNet.add(newSta)
else:
# no station output requested: one matching station
# is sufficient to include the network
newInv.add(newNet)
objCount += 1
break
if newNet.stationCount() > 0:
newInv.add(newNet)
staCount += newNet.stationCount()
objCount += staCount + 1
# Return 204 if no matching inventory was found
if newInv.networkCount() == 0:
msg = "no matching inventory found"
self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
return True
if self._conditionalRequestsEnabled:
req.setHeader(
"Last-Modified", http.datetimeToString(self._timeInventoryLoaded)
)
# Copy references (dataloggers, responses, sensors)
decCount, resCount = 0, 0
if ro.includeCha:
decCount = self._copyReferences(
newInv, req, objCount, self._inv, ro, dataloggers, sensors, self._maxObj
)
if decCount is None:
return False
resCount = (
newInv.responsePAZCount()
+ newInv.responseFIRCount()
+ newInv.responsePolynomialCount()
+ newInv.responseFAPCount()
+ newInv.responseIIRCount()
)
objCount += (
resCount + decCount + newInv.dataloggerCount() + newInv.sensorCount()
)
# Copy data extents
objOut = newInv
if len(extents) > 0:
objCount += 1
da = seiscomp.datamodel.DataAvailability()
for k, v in extents.items():
objCount += 1
da.add(seiscomp.datamodel.DataExtent(v))
objOut = ExportObjectList()
objOut.append(newInv)
objOut.append(da)
sink = utils.Sink(req)
if not exp.write(sink, objOut):
return False
seiscomp.logging.debug(
f"{ro.service}: returned {newInv.networkCount()}Net, {staCount}Sta, "
f"{locCount}Loc, {chaCount}Cha, {newInv.dataloggerCount()}DL, "
f"{decCount}Dec, {newInv.sensorCount()}Sen, {resCount}Res, {extCount}DAExt "
f"(total objects/bytes: {objCount}/{sink.written})"
)
utils.accessLog(req, ro, http.OK, sink.written, None)
return True
# ---------------------------------------------------------------------------
@staticmethod
def _formatEpoch(obj):
df = "%FT%T"
dfMS = "%FT%T.%f"
if obj.start().microseconds() > 0:
start = obj.start().toString(dfMS)
else:
start = obj.start().toString(df)
try:
if obj.end().microseconds() > 0:
end = obj.end().toString(dfMS)
else:
end = obj.end().toString(df)
except ValueError:
end = ""
return start, end
# ---------------------------------------------------------------------------
def _processRequestText(self, req, ro, dac):
if req._disconnected: # pylint: disable=W0212
return False
skipRestricted = not self._allowRestricted or (
ro.restricted is not None and not ro.restricted
)
isConditionalRequest = self._isConditionalRequest(req)
data = ""
lines = []
# level = network
if not ro.includeSta:
data = "#Network|Description|StartTime|EndTime|TotalStations\n"
# iterate over inventory networks
for net in ro.networkIter(self._inv, True):
if req._disconnected: # pylint: disable=W0212
return False
if skipRestricted and utils.isRestricted(net):
continue
# at least one matching station is required
stationFound = False
for sta in ro.stationIter(net, False):
if req._disconnected: # pylint: disable=W0212
return False
if self._matchStation(net, sta, ro, dac) and not (
skipRestricted and utils.isRestricted(sta)
):
stationFound = True
break
if not stationFound:
continue
if isConditionalRequest:
self.returnNotModified(req, ro)
return True
start, end = self._formatEpoch(net)
lines.append(
(
f"{net.code()} {start}",
f"{net.code()}|{net.description()}|{start}|{end}|"
f"{net.stationCount()}\n",
)
)
# level = station
elif not ro.includeCha:
data = (
"#Network|Station|Latitude|Longitude|Elevation|"
"SiteName|StartTime|EndTime\n"
)
# iterate over inventory networks
for net in ro.networkIter(self._inv, False):
if req._disconnected: # pylint: disable=W0212
return False
if skipRestricted and utils.isRestricted(net):
continue
# iterate over inventory stations
for sta in ro.stationIter(net, True):
if req._disconnected: # pylint: disable=W0212
return False
if not self._matchStation(net, sta, ro, dac) or (
skipRestricted and utils.isRestricted(sta)
):
continue
if isConditionalRequest:
self.returnNotModified(req, ro)
return True
try:
lat = str(sta.latitude())
except ValueError:
lat = ""
try:
lon = str(sta.longitude())
except ValueError:
lon = ""
try:
elev = str(sta.elevation())
except ValueError:
elev = ""
try:
desc = sta.description()
except ValueError:
desc = ""
start, end = self._formatEpoch(sta)
lines.append(
(
f"{net.code()}.{sta.code()} {start}",
f"{net.code()}|{sta.code()}|{lat}|{lon}|{elev}|{desc}|"
f"{start}|{end}\n",
)
)
# level = channel (response level not supported in text format)
else:
data = (
"#Network|Station|Location|Channel|Latitude|Longitude|"
"Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|"
"ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime\n"
)
# iterate over inventory networks
for net in ro.networkIter(self._inv, False):
if req._disconnected: # pylint: disable=W0212
return False
if skipRestricted and utils.isRestricted(net):
continue
# iterate over inventory stations, locations, streams
for sta in ro.stationIter(net, False):
if req._disconnected: # pylint: disable=W0212
return False
if skipRestricted and utils.isRestricted(sta):
continue
for loc in ro.locationIter(net, sta, True):
for stream in ro.streamIter(net, sta, loc, True, dac):
if skipRestricted and utils.isRestricted(stream):
continue
if isConditionalRequest:
self.returnNotModified(req, ro)
return True
try:
lat = str(loc.latitude())
except ValueError:
lat = ""
try:
lon = str(loc.longitude())
except ValueError:
lon = ""
try:
elev = str(loc.elevation())
except ValueError:
elev = ""
try:
depth = str(stream.depth())
except ValueError:
depth = ""
try:
azi = str(stream.azimuth())
except ValueError:
azi = ""
try:
dip = str(stream.dip())
except ValueError:
dip = ""
desc = ""
try:
sensor = self._inv.findSensor(stream.sensor())
if sensor is not None:
desc = sensor.description()
except ValueError:
pass
try:
scale = str(stream.gain())
except ValueError:
scale = ""
try:
scaleFreq = str(stream.gainFrequency())
except ValueError:
scaleFreq = ""
try:
scaleUnit = str(stream.gainUnit())
except ValueError:
scaleUnit = ""
try:
sr = str(
stream.sampleRateNumerator()
/ stream.sampleRateDenominator()
)
except (ValueError, ZeroDivisionError):
sr = ""
start, end = self._formatEpoch(stream)
lines.append(
(
f"{net.code()}.{sta.code()}.{loc.code()}."
f"{stream.code()} {start}",
f"{net.code()}|{sta.code()}|{loc.code()}|"
f"{stream.code()}|{lat}|{lon}|{elev}|{depth}|{azi}|"
f"{dip}|{desc}|{scale}|{scaleFreq}|{scaleUnit}|"
f"{sr}|{start}|{end}\n",
)
)
# sort lines and append to final data string
lines.sort(key=lambda line: line[0])
for line in lines:
data += line[1]
# Return 204 if no matching inventory was found
if len(lines) == 0:
msg = "no matching inventory found"
self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
return False
if self._conditionalRequestsEnabled:
req.setHeader(
"Last-Modified", http.datetimeToString(self._timeInventoryLoaded)
)
dataBin = utils.b_str(data)
utils.writeTSBin(req, dataBin)
seiscomp.logging.debug(
f"{ro.service}: returned {len(lines)} lines (total bytes: {len(dataBin)})"
)
utils.accessLog(req, ro, http.OK, len(dataBin), None)
return True
# ---------------------------------------------------------------------------
def _isConditionalRequest(self, req):
# support for time based conditional requests
if not self._conditionalRequestsEnabled:
return False
if req.method not in (b"GET", b"HEAD"):
return False
if req.getHeader("If-None-Match") is not None:
return False
modifiedSince = req.getHeader("If-Modified-Since")
if not modifiedSince:
return False
modifiedSince = utils.stringToDatetime(modifiedSince)
return modifiedSince and self._timeInventoryLoaded <= modifiedSince
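# Illustrative sketch (assumption, not part of the original source): a client
# sends "If-Modified-Since" and receives "304 Not Modified" as long as the
# inventory has not been reloaded, e.g. with Python's urllib:
#
#   import urllib.request
#   req = urllib.request.Request(
#       "http://localhost:8080/fdsnws/station/1/query?net=GE",  # hypothetical URL
#       headers={"If-Modified-Since": "Mon, 06 Jan 2025 12:00:00 GMT"},
#   )
#   try:
#       urllib.request.urlopen(req)
#   except urllib.error.HTTPError as e:
#       print(e.code)  # 304 if the inventory is unchanged since that date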
# ---------------------------------------------------------------------------
# Checks if at least one location and channel combination matches the
# request options
@staticmethod
def _matchStation(net, sta, ro, dac):
# No filter: return true immediately
if dac is None and (
not ro.channel or (not ro.channel.loc and not ro.channel.cha)
):
return True
for loc in ro.locationIter(net, sta, False):
if dac is None and not ro.channel.cha and not ro.time:
return True
for _ in ro.streamIter(net, sta, loc, False, dac):
return True
return False
# ---------------------------------------------------------------------------
# Adds a deep copy of the specified station to the new network if the
# location and channel combination matches the request options (if any)
@staticmethod
def _processStation(
newNet, net, sta, ro, dac, skipRestricted, isConditionalRequest
):
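# Returns a tuple of
# (channelCount, sensorLocationCount, dataloggerIDs, sensorIDs, extents)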
chaCount = 0
dataloggers, sensors, extents = set(), set(), {}
newSta = seiscomp.datamodel.Station(sta)
includeAvailability = dac is not None and ro.availability
# Copy comments
for i in range(sta.commentCount()):
newSta.add(seiscomp.datamodel.Comment(sta.comment(i)))
for loc in ro.locationIter(net, sta, True):
newLoc = seiscomp.datamodel.SensorLocation(loc)
# Copy comments
for i in range(loc.commentCount()):
newLoc.add(seiscomp.datamodel.Comment(loc.comment(i)))
for stream in ro.streamIter(net, sta, loc, True, dac):
if skipRestricted and utils.isRestricted(stream):
continue
if isConditionalRequest:
return 1, 1, set(), set(), {}
newCha = seiscomp.datamodel.Stream(stream)
# Copy comments
for i in range(stream.commentCount()):
newCha.add(seiscomp.datamodel.Comment(stream.comment(i)))
newLoc.add(newCha)
dataloggers.add(stream.datalogger())
sensors.add(stream.sensor())
if includeAvailability:
ext = dac.extent(net.code(), sta.code(), loc.code(), stream.code())
if ext is not None and ext.publicID() not in extents:
extents[ext.publicID()] = ext
if newLoc.streamCount() > 0:
newSta.add(newLoc)
chaCount += newLoc.streamCount()
if newSta.sensorLocationCount() > 0:
newNet.add(newSta)
return chaCount, newSta.sensorLocationCount(), dataloggers, sensors, extents
return 0, 0, set(), set(), {}
# ---------------------------------------------------------------------------
# Copy references (data loggers, sensors, responses) depending on the
# request options
def _copyReferences(
self, newInv, req, objCount, inv, ro, dataloggers, sensors, maxObj
):
responses = set()
decCount = 0
# datalogger
for i in range(inv.dataloggerCount()):
if req._disconnected: # pylint: disable=W0212
return None
logger = inv.datalogger(i)
if logger.publicID() not in dataloggers:
continue
newLogger = seiscomp.datamodel.Datalogger(logger)
newInv.add(newLogger)
# decimations are only needed for responses
if ro.includeRes:
for j in range(logger.decimationCount()):
decimation = logger.decimation(j)
newLogger.add(seiscomp.datamodel.Decimation(decimation))
# collect response ids
filterStr = ""
try:
filterStr = f"{decimation.analogueFilterChain().content()} "
except ValueError:
pass
try:
filterStr += decimation.digitalFilterChain().content()
except ValueError:
pass
for resp in filterStr.split():
responses.add(resp)
decCount += newLogger.decimationCount()
objCount += newInv.dataloggerCount() + decCount
resCount = len(responses)
if not self.checkObjects(req, objCount + resCount, maxObj):
return None
# sensor
for i in range(inv.sensorCount()):
if req._disconnected: # pylint: disable=W0212
return None
sensor = inv.sensor(i)
if sensor.publicID() not in sensors:
continue
newSensor = seiscomp.datamodel.Sensor(sensor)
newInv.add(newSensor)
resp = newSensor.response()
if resp:
if ro.includeRes:
responses.add(resp)
else:
# no responses: remove response reference to avoid missing
# response warning of exporter
newSensor.setResponse("")
objCount += newInv.sensorCount()
resCount = len(responses)
if not self.checkObjects(req, objCount + resCount, maxObj):
return None
# responses
if ro.includeRes:
if req._disconnected: # pylint: disable=W0212
return None
for i in range(inv.responsePAZCount()):
resp = inv.responsePAZ(i)
if resp.publicID() in responses:
newInv.add(seiscomp.datamodel.ResponsePAZ(resp))
if req._disconnected: # pylint: disable=W0212
return None
for i in range(inv.responseFIRCount()):
resp = inv.responseFIR(i)
if resp.publicID() in responses:
newInv.add(seiscomp.datamodel.ResponseFIR(resp))
if req._disconnected: # pylint: disable=W0212
return None
for i in range(inv.responsePolynomialCount()):
resp = inv.responsePolynomial(i)
if resp.publicID() in responses:
newInv.add(seiscomp.datamodel.ResponsePolynomial(resp))
if req._disconnected: # pylint: disable=W0212
return None
for i in range(inv.responseFAPCount()):
resp = inv.responseFAP(i)
if resp.publicID() in responses:
newInv.add(seiscomp.datamodel.ResponseFAP(resp))
if req._disconnected: # pylint: disable=W0212
return None
for i in range(inv.responseIIRCount()):
resp = inv.responseIIR(i)
if resp.publicID() in responses:
newInv.add(seiscomp.datamodel.ResponseIIR(resp))
return decCount
# vim: ts=4 et


@ -0,0 +1,201 @@
################################################################################
# Copyright (C) 2013-2014 gempa GmbH
#
# Common utility functions
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################
import socket
import traceback
import twisted
from twisted.internet import reactor, defer
from twisted.python.failure import Failure
from twisted.web import http
import seiscomp.logging
import seiscomp.core
import seiscomp.io
from seiscomp.client import Application
twisted_version = (twisted.version.major, twisted.version.minor, twisted.version.micro)
# -------------------------------------------------------------------------------
# Converts a unicode string to a byte string
def b_str(unicode_string):
return unicode_string.encode("utf-8", "replace")
# -------------------------------------------------------------------------------
# Converts a byte string to a unicode string
def u_str(byte_string):
return byte_string.decode("utf-8", "replace")
# -------------------------------------------------------------------------------
# Tests if a SC3 inventory object is restricted
def isRestricted(obj):
try:
return obj.restricted()
except ValueError:
return False
# -------------------------------------------------------------------------------
# Thread-safe write of string data using reactor main thread
def writeTS(req, data):
reactor.callFromThread(req.write, b_str(data))
# -------------------------------------------------------------------------------
# Thread-safe write of binary data using reactor main thread
def writeTSBin(req, data):
reactor.callFromThread(req.write, data)
# -------------------------------------------------------------------------------
# Finish requests deferred to threads
def onFinish(result, req):
seiscomp.logging.debug(f"finish value = {str(result)}")
if isinstance(result, Failure):
err = result.value
if isinstance(err, defer.CancelledError):
seiscomp.logging.error("request canceled")
return
seiscomp.logging.error(
f"{result.getErrorMessage()} "
f"{traceback.format_tb(result.getTracebackObject())}"
)
else:
if result:
seiscomp.logging.debug("request successfully served")
else:
seiscomp.logging.debug("request failed")
reactor.callFromThread(req.finish)
# -------------------------------------------------------------------------------
# Handle connection errors
def onCancel(failure, req):
if failure:
seiscomp.logging.error(
f"{failure.getErrorMessage()} "
f"{traceback.format_tb(failure.getTracebackObject())}"
)
else:
seiscomp.logging.error("request canceled")
req.cancel()
# -------------------------------------------------------------------------------
# Handle premature connection reset
def onResponseFailure(_, call):
seiscomp.logging.error("response canceled")
call.cancel()
# -------------------------------------------------------------------------------
# Writes an entry to the access log (if an access logger is configured)
def accessLog(req, ro, code, length, err):
logger = Application.Instance()._accessLog # pylint: disable=W0212
if logger is None:
return
logger.log(AccessLogEntry(req, ro, code, length, err))
# -------------------------------------------------------------------------------
# Compability function for stringToDatetime() change in Twisted 24.7, see
# https://github.com/twisted/twisted/commit/731e370dfc5d2f7224dc1e12931ddf5c51b211a6
def stringToDatetime(dateString):
if twisted_version < (24, 7):
return http.stringToDatetime(dateString)
# Since version 24.7 the argument needs to be a byte string
return http.stringToDatetime(dateString.encode("ascii"))
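# Example (illustrative): both branches accept the standard HTTP date format
# and return the time as seconds since the epoch, e.g.
#   stringToDatetime("Mon, 06 Jan 2025 12:00:00 GMT")
# Only the argument type differs: Twisted >= 24.7 expects a byte string.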
################################################################################
class Sink(seiscomp.io.ExportSink):
def __init__(self, request):
super().__init__()
self.request = request
self.written = 0
def write(self, data):
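# Abort if the client has disconnected; the negative return value tells
# the exporter that the write failed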
if self.request._disconnected: # pylint: disable=W0212
return -1
writeTSBin(self.request, data)
self.written += len(data)
return len(data)
################################################################################
class AccessLogEntry:
def __init__(self, req, ro, code, length, err):
# user agent
agent = req.getHeader("User-Agent")
if agent is None:
agent = ""
else:
agent = agent[:100].replace("|", " ")
if err is None:
err = ""
service, user, accessTime, procTime = "", "", "", 0
net, sta, loc, cha = "", "", "", ""
if ro is not None:
# processing time in milliseconds
procTime = int((seiscomp.core.Time.GMT() - ro.accessTime).length() * 1000.0)
service = ro.service
if ro.userName is not None:
user = ro.userName
accessTime = str(ro.accessTime)
if ro.channel is not None:
if ro.channel.net is not None:
net = ",".join(ro.channel.net)
if ro.channel.sta is not None:
sta = ",".join(ro.channel.sta)
if ro.channel.loc is not None:
loc = ",".join(ro.channel.loc)
if ro.channel.cha is not None:
cha = ",".join(ro.channel.cha)
# The host name of the client is resolved in the __str__ method by the
# logging thread so that a long running DNS reverse lookup may not slow
# down the request
self.msgPrefix = f"{service}|{u_str(req.getRequestHostname())}|{accessTime}|"
xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
if xff:
self.userIP = xff[0].split(",")[0].strip()
else:
self.userIP = req.getClientIP()
self.clientIP = req.getClientIP()
self.msgSuffix = (
f"|{self.clientIP}|{length}|{procTime}|{err}|{agent}|{code}|{user}|{net}"
f"|{sta}|{loc}|{cha}||"
)
def __str__(self):
try:
userHost = socket.gethostbyaddr(self.userIP)[0]
except socket.herror:
userHost = self.userIP
return self.msgPrefix + userHost + self.msgSuffix
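# The resulting log line consists of "|"-separated fields in this order
# (derived from msgPrefix and msgSuffix above):
#   service|requestHost|accessTime|userHost|clientIP|length|procTime|err|
#   agent|code|user|net|sta|loc|cha||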
# vim: ts=4 et

1275
lib/python/seiscomp/geo.py Normal file

File diff suppressed because it is too large

2532
lib/python/seiscomp/io.py Normal file

File diff suppressed because it is too large


@ -0,0 +1,386 @@
############################################################################
# Copyright (C) by gempa GmbH, GFZ Potsdam #
# #
# You can redistribute and/or modify this program under the #
# terms of the SeisComP Public License. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# SeisComP Public License for more details. #
############################################################################
from __future__ import print_function
import os
import sys
import time
import string
import subprocess
import seiscomp.config
class Template(string.Template):
idpattern = r'[_a-z][_a-z0-9.]*'
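# Example (illustrative, not part of the original source): the extended
# idpattern also matches dotted keys, which the default string.Template
# identifier pattern would reject:
#
#   t = Template("server = ${connection.server}")
#   t.substitute({"connection.server": "localhost"})  # -> "server = localhost"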
class Environment(seiscomp.config.Config):
def __init__(self, rootPath):
seiscomp.config.Config.__init__(self)
self.SEISCOMP_ROOT = rootPath
try:
self.home_dir = os.environ["HOME"]
except KeyError:
self.home_dir = "."
try:
self.local_config_dir = os.environ["SEISCOMP_LOCAL_CONFIG"]
except KeyError:
self.local_config_dir = os.path.join(self.home_dir, ".seiscomp")
self.root = rootPath
self.bin_dir = os.path.join(self.root, "bin")
self.data_dir = os.path.join(self.root, "share")
self.etc_dir = os.path.join(self.root, "etc")
self.etc_defaults_dir = os.path.join(self.root, "etc", "defaults")
self.descriptions_dir = os.path.join(self.root, "etc", "descriptions")
self.key_dir = os.path.join(self.root, "etc", "key")
self.var_dir = os.path.join(self.root, "var")
self.log_dir = os.path.join(self.local_config_dir, "log")
self.cwd = None
self.last_template_file = None
self._csv = False
self._readConfig()
os.environ["SEISCOMP_ROOT"] = self.SEISCOMP_ROOT
# Add LD_LIBRARY_PATH and PATH to OS environment
LD_LIBRARY_PATH = os.path.join(self.SEISCOMP_ROOT, "lib")
BIN_PATH = os.path.join(self.SEISCOMP_ROOT, "bin")
SBIN_PATH = os.path.join(self.SEISCOMP_ROOT, "sbin")
PATH = BIN_PATH + ":" + SBIN_PATH
PYTHONPATH = os.path.join(self.SEISCOMP_ROOT, "lib", "python")
try:
LD_LIBRARY_PATH = os.environ["LD_LIBRARY_PATH"] + ":" + LD_LIBRARY_PATH
except KeyError:
pass
os.environ["LD_LIBRARY_PATH"] = LD_LIBRARY_PATH
try:
PATH = PATH + ":" + os.environ["PATH"]
except KeyError:
pass
os.environ["PATH"] = PATH
try:
PYTHONPATH = os.environ["PYTHONPATH"] + ":" + PYTHONPATH
except KeyError:
pass
os.environ["PYTHONPATH"] = PYTHONPATH
# Create required directories
try:
os.makedirs(os.path.join(self.root, "var", "log"))
except OSError:
pass
try:
os.makedirs(os.path.join(self.root, "var", "run"))
except OSError:
pass
def _readConfig(self):
self.syslog = False
# Read configuration file
kernelCfg = os.path.join(self.root, "etc", "kernel.cfg")
if not self.readConfig(kernelCfg):
return
try:
self.syslog = self.getBool("syslog")
except Exception:
pass
# Changes into the SEISCOMP_ROOT directory
def chroot(self):
if self.root:
# Remember current directory
self.cwd = os.getcwd()
os.chdir(self.SEISCOMP_ROOT)
self.root = ""
# Changes back to the current workdir
def chback(self):
if self.cwd:
os.chdir(self.cwd)
self.cwd = None
self.root = self.SEISCOMP_ROOT
def resolvePath(self, path):
return path.replace("@LOGDIR@", self.log_dir)\
.replace("@CONFIGDIR@", self.local_config_dir)\
.replace("@DEFAULTCONFIGDIR@", self.etc_defaults_dir)\
.replace("@SYSTEMCONFIGDIR@", self.etc_dir)\
.replace("@ROOTDIR@", self.root)\
.replace("@DATADIR@", self.data_dir)\
.replace("@KEYDIR@", self.key_dir)\
.replace("@HOMEDIR@", self.home_dir)
def setCSVOutput(self, csv):
self._csv = csv
def enableModule(self, name):
runFile = os.path.join(self.root, "etc", "init", name + ".auto")
if os.path.exists(runFile):
print("%s is already enabled" % name)
return 0
try:
open(runFile, 'w').close()
print("enabled %s" % name)
return 0
except Exception as exc:
sys.stderr.write(str(exc) + "\n")
sys.stderr.flush()
return 0
def disableModule(self, name):
runFile = os.path.join(self.root, "etc", "init", name + ".auto")
if not os.path.exists(runFile):
print("%s is not enabled" % name)
return 0
try:
os.remove(runFile)
print("disabled %s" % name)
except Exception as exc:
sys.stderr.write(str(exc) + "\n")
sys.stderr.flush()
return 0
def isModuleEnabled(self, name):
runFile = os.path.join(self.root, "etc", "init", name + ".auto")
return os.path.exists(runFile)
# Return the module name from a path
def moduleName(self, path):
return os.path.splitext(os.path.basename(path))[0]
# Returns a module's lockfile
def lockFile(self, module):
return os.path.join(self.root, "var", "run", module + ".pid")
# Returns a module's runfile
def runFile(self, module):
return os.path.join(self.root, "var", "run", module + ".run")
# Returns a module's logfile
def logFile(self, module):
return os.path.join(self.root, "var", "log", module + ".log")
# Returns the binary file path of a given module name
def binaryFile(self, module):
# return os.path.join(self.root, "bin/" + module)
return module
def start(self, module, binary, params, nohup=False):
cmd = binary + " " + params + " >" + self.logFile(module) + " 2>&1"
if nohup:
cmd = "nohup " + cmd + " &"
return os.system(cmd)
def stop(self, module, timeout):
return self.killWait(module, timeout)
def tryLock(self, module, timeout=None):
if timeout is None:
return subprocess.call("trylock " + self.lockFile(module), shell=True) == 0
try:
timeoutSeconds = int(timeout)
except (TypeError, ValueError):
print("Invalid timeout parameter, expected positive integer")
raise
return subprocess.call("waitlock %d \"%s\"" % (timeoutSeconds, self.lockFile(module)), shell=True) == 0
def killWait(self, module, timeout):
lockfile = self.lockFile(module)
# Read the pid from the lock file; the file is closed even if parsing fails
with open(lockfile, "r") as f:
pid = int(f.readline())
# Kill process with pid
subprocess.call("kill %d" % pid, shell=True)
if subprocess.call("waitlock %d \"%s\"" % (timeout, lockfile), shell=True) != 0:
print("timeout exceeded")
subprocess.call("kill -9 %d" % pid, shell=True)
# Remove pid file
try:
os.remove(lockfile)
except:
pass
return True
def processTemplate(self, templateFile, paths, params, printError=False):
self.last_template_file = None
for tp in paths:
if os.path.exists(os.path.join(tp, templateFile)):
break
else:
if printError:
print("Error: template %s not found" % templateFile)
return ""
filename = os.path.join(tp, templateFile)
self.last_template_file = filename
try:
with open(filename) as f:
t = Template(f.read())
except IOError:
if printError:
print("Error: template %s not readable" % filename)
return ""
params['date'] = time.ctime()
params['template'] = filename
while True:
try:
return t.substitute(params)
except KeyError as e:
print("warning: $%s is not defined in %s" % (e.args[0], filename))
params[e.args[0]] = ""
except ValueError as e:
raise ValueError("%s: %s" % (filename, str(e)))
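# Example usage (illustrative; template name and parameter are hypothetical):
#   env.processTemplate("my_module.tpl", [env.etc_dir, env.etc_defaults_dir],
#                       {"module.name": "my_module"}, printError=True)
# The first directory containing the template wins; unresolved $keys are
# reported with a warning and substituted by empty strings.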
def logStatus(self, name, isRunning, shouldRun, isEnabled):
if not self._csv:
sys.stdout.write("%-20s is " % name)
if not isRunning:
sys.stdout.write("not ")
sys.stdout.write("running")
if not isRunning and shouldRun:
sys.stdout.write(" [WARNING]")
sys.stdout.write("\n")
else:
sys.stdout.write("%s;%d;%d;%d\n" % (
name, int(isRunning), int(shouldRun), int(isEnabled)))
sys.stdout.flush()
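# Example output (illustrative): with setCSVOutput(True) a module line reads
#   scmaster;1;1;1
# i.e. name;isRunning;shouldRun;isEnabled; otherwise a human-readable
# "scmaster             is running" line is printed.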
def log(self, line):
sys.stdout.write(line + "\n")
sys.stdout.flush()
# The module interface, which implements the basic default operations.
# Each script can define its own handlers to customize the behaviour.
# Available handlers:
# start()
# stop()
# check()
# status(shouldRun)
# setup(params = dict{name, values as []})
# updateConfig()
class Module:
def __init__(self, env, name):
self.env = env
self.name = name
# The start order
self.order = 100
# Defines if this is a kernel module or not.
# Kernel modules are always started
self.isKernelModule = False
# Defines if this is a config only module
self.isConfigModule = False
# Set default timeout when stopping a module to 10 seconds before killing it
self.killTimeout = 10
# Set default timeout when reloading a module to 10 seconds
self.reloadTimeout = 10
def _get_start_params(self):
# Run as daemon
params = "-D"
# Enable syslog if configured
if self.env.syslog:
params = params + "s"
params = params + " -l " + self.env.lockFile(self.name)
return params
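# Example (illustrative): for a module named "scmaster" with syslog disabled
# this yields "-D -l <SEISCOMP_ROOT>/var/run/scmaster.pid".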
def _run(self):
return self.env.start(self.name, self.env.binaryFile(self.name), self._get_start_params())
def isRunning(self):
return not self.env.tryLock(self.name)
def start(self):
if self.isRunning():
self.env.log("%s is already running" % self.name)
return 1
self.env.log("starting %s" % self.name)
return self._run()
def stop(self):
if not self.isRunning():
self.env.log("%s is not running" % self.name)
return 1
self.env.log("shutting down %s" % self.name)
# Default timeout to 10 seconds
return self.env.stop(self.name, self.killTimeout)
def reload(self):
self.env.log("reload not supported by %s" % self.name)
return 1
# Check is the same as start. Whether a module should be checked is
# decided by the control script, which checks for the existence of a
# corresponding run file.
def check(self):
return self.start()
def status(self, shouldRun):
self.env.logStatus(self.name, self.isRunning(), shouldRun, self.env.isModuleEnabled(
self.name) or isinstance(self, CoreModule))
def requiresKernelModules(self):
# The default handler triggers a start of kernel modules before updating
# its configuration
return True
def updateConfigProxy(self):
# This function must return either a string containing the module name
# of the proxy module that should be configured as well or None.
return None
def updateConfig(self):
# This function must return a number indicating the error code where
# 0 means no error. The default handler doesn't do anything.
return 0
def printCrontab(self):
# The default handler doesn't do anything
return 0
def supportsAliases(self):
# The default handler does not support aliases
return False
# Define a kernel core module which is started always
class CoreModule(Module):
def __init__(self, env, name):
Module.__init__(self, env, name)


@ -0,0 +1,6 @@
from __future__ import (absolute_import, division, print_function,
unicode_literals)
class DBError(Exception):
pass

Some files were not shown because too many files have changed in this diff