[installation] Init with initial config for global
1409
lib/python/gempa/CAPS.py
Normal file
File diff suppressed because it is too large
BIN
lib/python/gempa/CAPS.pyo
Normal file
Binary file not shown.
154
lib/python/gempa/Processing.py
Normal file
@@ -0,0 +1,154 @@
# This file was automatically generated by SWIG (https://www.swig.org).
# Version 4.3.0
#
# Do not make changes to this file unless you know what you are doing - modify
# the SWIG interface file instead.

from sys import version_info as _swig_python_version_info

# Import the low-level C/C++ module
if __package__ or "." in __name__:
    from . import _gProcessing
else:
    import _gProcessing

try:
    import builtins as __builtin__
except ImportError:
    import __builtin__

def _swig_repr(self):
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)


def _swig_setattr_nondynamic_instance_variable(set):
    def set_instance_attr(self, name, value):
        if name == "this":
            set(self, name, value)
        elif name == "thisown":
            self.this.own(value)
        elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
            set(self, name, value)
        else:
            raise AttributeError("You cannot add instance attributes to %s" % self)
    return set_instance_attr


def _swig_setattr_nondynamic_class_variable(set):
    def set_class_attr(cls, name, value):
        if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
            set(cls, name, value)
        else:
            raise AttributeError("You cannot add class attributes to %s" % cls)
    return set_class_attr


def _swig_add_metaclass(metaclass):
    """Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
    def wrapper(cls):
        return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
    return wrapper


class _SwigNonDynamicMeta(type):
    """Meta class to enforce nondynamic attributes (no new attributes) for a class"""
    __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)


class Ecef2Enu(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, lat, lon, h):
        _gProcessing.Ecef2Enu_swiginit(self, _gProcessing.new_Ecef2Enu(lat, lon, h))

    def convert(self, x, y, z):
        return _gProcessing.Ecef2Enu_convert(self, x, y, z)
    __swig_destroy__ = _gProcessing.delete_Ecef2Enu

# Register Ecef2Enu in _gProcessing:
_gProcessing.Ecef2Enu_swigregister(Ecef2Enu)

class Enu2Ecef(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, lat, lon, h):
        _gProcessing.Enu2Ecef_swiginit(self, _gProcessing.new_Enu2Ecef(lat, lon, h))

    def convert(self, e, n, u):
        return _gProcessing.Enu2Ecef_convert(self, e, n, u)
    __swig_destroy__ = _gProcessing.delete_Enu2Ecef

# Register Enu2Ecef in _gProcessing:
_gProcessing.Enu2Ecef_swigregister(Enu2Ecef)


def geodetic2ecef(lat, lon, h):
    return _gProcessing.geodetic2ecef(lat, lon, h)

def distance(strike1, dip1, rake1, strike2, dip2, rake2, scaleX=1.0, scaleY=1.0, scaleZ=1.0):
    return _gProcessing.distance(strike1, dip1, rake1, strike2, dip2, rake2, scaleX, scaleY, scaleZ)

def rotAngleNP(strike1, dip1, rake1, strike2, dip2, rake2):
    return _gProcessing.rotAngleNP(strike1, dip1, rake1, strike2, dip2, rake2)

def rotAngleMT(strike1, dip1, rake1, strike2, dip2, rake2):
    return _gProcessing.rotAngleMT(strike1, dip1, rake1, strike2, dip2, rake2)

def otherNodalPlane(inStrike, inDip, inRake):
    return _gProcessing.otherNodalPlane(inStrike, inDip, inRake)

def nodalPlane2Tensor(strike, dip, rake):
    return _gProcessing.nodalPlane2Tensor(strike, dip, rake)

class Vector3D(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _gProcessing.Vector3D_swiginit(self, _gProcessing.new_Vector3D(*args))

    def length(self):
        return _gProcessing.Vector3D_length(self)

    def dot(self, v):
        return _gProcessing.Vector3D_dot(self, v)

    def cross(self, a, b):
        return _gProcessing.Vector3D_cross(self, a, b)

    def normalize(self):
        return _gProcessing.Vector3D_normalize(self)

    def __imul__(self, scale):
        return _gProcessing.Vector3D___imul__(self, scale)

    def __mul__(self, *args):
        return _gProcessing.Vector3D___mul__(self, *args)

    def __iadd__(self, other):
        return _gProcessing.Vector3D___iadd__(self, other)

    def __isub__(self, other):
        return _gProcessing.Vector3D___isub__(self, other)

    def __add__(self, other):
        return _gProcessing.Vector3D___add__(self, other)

    def __sub__(self, other):
        return _gProcessing.Vector3D___sub__(self, other)

    def fromAngles(self, radAzimuth, radDip):
        return _gProcessing.Vector3D_fromAngles(self, radAzimuth, radDip)

    def toAngles(self, radAzimuth, radDip):
        return _gProcessing.Vector3D_toAngles(self, radAzimuth, radDip)
    x = property(_gProcessing.Vector3D_x_get, _gProcessing.Vector3D_x_set)
    y = property(_gProcessing.Vector3D_y_get, _gProcessing.Vector3D_y_set)
    z = property(_gProcessing.Vector3D_z_get, _gProcessing.Vector3D_z_set)
    __swig_destroy__ = _gProcessing.delete_Vector3D

# Register Vector3D in _gProcessing:
_gProcessing.Vector3D_swigregister(Vector3D)
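[Editor's note] A minimal usage sketch for the SWIG wrapper above, assuming the compiled _gProcessing extension is importable as gempa.Processing, that lat/lon are geodetic degrees with heights in metres, and that convert() returns a 3-tuple; none of those conventions are stated in this diff.

# Hedged sketch: round-trip a point through the ECEF<->ENU converters.
from gempa import Processing

lat, lon, h = 52.5, 13.4, 40.0            # hypothetical reference point
x, y, z = Processing.geodetic2ecef(lat, lon, h)

ecef2enu = Processing.Ecef2Enu(lat, lon, h)
e, n, u = ecef2enu.convert(x, y, z)       # ~(0, 0, 0) at the reference point

enu2ecef = Processing.Enu2Ecef(lat, lon, h)
x2, y2, z2 = enu2ecef.convert(e, n, u)    # back to the original ECEF point

# Vector3D with three components is assumed from the overloaded constructor.
v = Processing.Vector3D(1.0, 2.0, 2.0)
print(v.length())                         # 3.0 for this vector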
BIN
lib/python/gempa/Processing.pyo
Normal file
Binary file not shown.
0
lib/python/gempa/__init__.py
Normal file
BIN
lib/python/gempa/_gCAPS.so
Normal file
Binary file not shown.
BIN
lib/python/gempa/_gProcessing.so
Normal file
Binary file not shown.
0
lib/python/licsar2caps/__init__.py
Normal file
104
lib/python/licsar2caps/journal.py
Normal file
@@ -0,0 +1,104 @@
#!/usr/bin/env python3

############################################################################
# Copyright (C) 2024 by gempa GmbH                                         #
#                                                                          #
# All Rights Reserved.                                                     #
#                                                                          #
# NOTICE: All information contained herein is, and remains                 #
# the property of gempa GmbH and its suppliers, if any. The intellectual   #
# and technical concepts contained herein are proprietary to gempa GmbH    #
# and its suppliers.                                                       #
# Dissemination of this information or reproduction of this material       #
# is strictly forbidden unless prior written permission is obtained        #
# from gempa GmbH.                                                         #
############################################################################

import os

from seiscomp import logging

from gempa import CAPS


class JournalItem:
    def __init__(self, startTime=None, endTime=None):
        self.startTime = startTime
        self.endTime = endTime


class Journal:
    # -------------------------------------------------------------------------
    def __init__(self):
        self.items = {}

    # -------------------------------------------------------------------------
    def get(self, streamID):
        return self.items.get(streamID)

    # -------------------------------------------------------------------------
    def read(self, filename):
        try:
            f = open(filename, "r", encoding="UTF-8")
        except Exception as err:
            logging.error(f"Journal: Could not open file: {err}")
            return False

        try:
            lineNo = 0
            for line in f:
                # Count every physical line, including comments, so error
                # messages report 1-based line numbers.
                lineNo += 1
                line = line.strip()
                if line.startswith("#"):
                    continue

                try:
                    stationID, strStartTime, strEndTime = line.split(" ")
                except ValueError:
                    logging.error(f"Journal: Invalid line format in line {lineNo}")
                    return False

                item = JournalItem()

                item.startTime = CAPS.Time.FromString(strStartTime, "%FT%T.%Z")
                item.endTime = CAPS.Time.FromString(strEndTime, "%FT%T.%Z")

                self.items[stationID] = item
        except IOError as err:
            logging.error(f"Journal: Could not read journal from file: {err}")
        finally:
            f.close()

        logging.info("Recovered journal")
        for k, v in self.items.items():
            logging.info(f" + {k} {v.startTime.iso()} ~ {v.endTime.iso()}")

        logging.info("End")

        return True

    # -------------------------------------------------------------------------
    def write(self, filename):
        path = os.path.dirname(filename)
        if not path:
            return False

        if not os.path.exists(path):
            try:
                os.makedirs(path)
            except Exception as err:
                logging.error(f"Journal: Could not create directory: {err}")
                return False

        try:
            with open(filename, "w", encoding="UTF-8") as f:
                for k, v in self.items.items():
                    f.write(f"{k} {v.startTime.iso()} {v.endTime.iso()}\n")
        except Exception as err:
            logging.error(f"Journal: Failed to write journal: {err}")
            return False

        return True
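[Editor's note] A hedged round-trip sketch for the Journal class above. The "%FT%T.%Z" format string and the FromString()/iso() calls are taken from the file itself; the assumption that iso() output parses back with the same format, plus the paths and station ID, are the editor's own.

from gempa import CAPS
from licsar2caps.journal import Journal, JournalItem

journal = Journal()
item = JournalItem(
    startTime=CAPS.Time.FromString("2024-01-01T00:00:00.000000", "%FT%T.%Z"),
    endTime=CAPS.Time.FromString("2024-01-02T00:00:00.000000", "%FT%T.%Z"),
)
journal.items["NE.STA00.00"] = item          # hypothetical station ID

journal.write("/tmp/licsar2caps/journal")    # one "<id> <start> <end>" line per item
restored = Journal()
restored.read("/tmp/licsar2caps/journal")    # rebuilds the same items dict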
127
lib/python/licsar2caps/streammap.py
Normal file
@@ -0,0 +1,127 @@
#!/usr/bin/env python3

############################################################################
# Copyright (C) 2024 by gempa GmbH                                         #
#                                                                          #
# All Rights Reserved.                                                     #
#                                                                          #
# NOTICE: All information contained herein is, and remains                 #
# the property of gempa GmbH and its suppliers, if any. The intellectual   #
# and technical concepts contained herein are proprietary to gempa GmbH    #
# and its suppliers.                                                       #
# Dissemination of this information or reproduction of this material       #
# is strictly forbidden unless prior written permission is obtained        #
# from gempa GmbH.                                                         #
############################################################################

import os

from seiscomp import logging


class StreamMapItem:
    def __init__(self):
        self.networkCode = ""
        self.stationCode = ""
        self.locationCode = ""
        self.stationID = ""
        self.baseCode = None
        self.folder = None
        self.startTime = None
        self.endTime = None


class StreamMap:
    # -------------------------------------------------------------------------
    def __init__(self):
        self.items = {}

    # -------------------------------------------------------------------------
    def get(self, streamID):
        return self.items.get(streamID)

    # -------------------------------------------------------------------------
    def read(self, filename):
        try:
            f = open(filename, "r", encoding="UTF-8")
        except Exception as err:
            logging.error(f"Stream map: Could not open file: {err}")
            return False

        try:
            lineNo = 0
            for line in f:
                # 1-based line numbers, consistent with Journal.read()
                lineNo += 1
                line = line.strip()
                if line.startswith("#"):
                    continue

                if len(line) == 0:
                    continue

                folder = line.strip()

                toks = folder.split("_")
                tokCount = len(toks)
                if tokCount != 3:
                    logging.error(f"Stream map: Invalid stream ID in line {lineNo}")
                    continue

                item = StreamMapItem()
                item.networkCode = toks[0]
                item.stationCode = toks[1]
                item.locationCode = toks[2]
                item.baseCode = str(int(item.networkCode[0:3]))
                item.folder = folder
                item.stationID = (
                    item.networkCode
                    + "."
                    + item.stationCode
                    + "."
                    + item.locationCode
                )

                self.items[item.stationID] = item
        except IOError as err:
            logging.error(f"Stream map: Could not read stream map from file: {err}")
        finally:
            f.close()

        if len(self.items) == 0:
            logging.info("No streams configured: Nothing to do")
            return False

        logging.info("Configured stations")
        for k, _v in self.items.items():
            logging.info(f" + {k}")

        logging.info("End")

        return True

    # -------------------------------------------------------------------------
    def write(self, filename):
        path = os.path.dirname(filename)
        if not path:
            return False

        if not os.path.exists(path):
            try:
                os.makedirs(path)
            except Exception as err:
                logging.error(f"Stream map: Could not create directory: {err}")
                return False

        try:
            with open(filename, "w", encoding="UTF-8") as f:
                for k, v in self.items.items():
                    f.write(f"{k} {v.startTime.iso()} {v.endTime.iso()}\n")
        except Exception as err:
            logging.error(f"Stream map: Could not write file: {err}")
            return False

        return True
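[Editor's note] A sketch of the stream list format StreamMap.read() expects, inferred from the parsing above: one folder name per line in the form <network>_<station>_<location>, with "#" comments and blank lines ignored. The concrete IDs below are invented.

from licsar2caps.streammap import StreamMap

# hypothetical stream list file
with open("streams.txt", "w", encoding="UTF-8") as f:
    f.write("# LiCSAR frames, one per line\n")
    f.write("022D_04826_121209\n")

sm = StreamMap()
if sm.read("streams.txt"):
    item = sm.get("022D.04826.121209")  # items are keyed by NET.STA.LOC
    print(item.folder, item.baseCode)   # "022D_04826_121209", "22" (int of first 3 chars)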
36
lib/python/licsar2caps/utils.py
Normal file
@@ -0,0 +1,36 @@
#!/usr/bin/env python3

############################################################################
# Copyright (C) 2024 by gempa GmbH                                         #
#                                                                          #
# All Rights Reserved.                                                     #
#                                                                          #
# NOTICE: All information contained herein is, and remains                 #
# the property of gempa GmbH and its suppliers, if any. The intellectual   #
# and technical concepts contained herein are proprietary to gempa GmbH    #
# and its suppliers.                                                       #
# Dissemination of this information or reproduction of this material       #
# is strictly forbidden unless prior written permission is obtained        #
# from gempa GmbH.                                                         #
############################################################################

import numpy as np


from gempa import CAPS


def calculateAbsPerc(grid, percentile=99.9):
    grid_array = np.array(grid)
    result = np.percentile(np.abs(grid_array), percentile)
    return result


def parseTime(s):
    formats = ["%F", "%F %T", "%F %T.%Z", "%FT%T", "%FT%T.%Z"]
    for fmt in formats:
        time = CAPS.Time.FromString(s, fmt)
        if time.valid():
            return time

    return None
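[Editor's note] A quick sketch of the helpers above. The accepted layouts follow parseTime's format list; what %F, %T and .%Z expand to is whatever CAPS.Time.FromString accepts (the same tokens journal.py uses), and the sample values are arbitrary.

from licsar2caps.utils import calculateAbsPerc, parseTime

# 99.9th percentile of absolute grid values, e.g. for colour scaling
print(calculateAbsPerc([[-0.4, 0.1], [0.3, -0.2]]))

# any listed layout should parse; an unparseable string yields None
for s in ("2024-05-17", "2024-05-17 12:00:00", "2024-05-17T12:00:00.500000"):
    t = parseTime(s)
    print(s, "->", t.iso() if t else None)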
0
lib/python/nettab/__init__.py
Normal file
366
lib/python/nettab/basesc3.py
Normal file
@@ -0,0 +1,366 @@
from __future__ import print_function
import seiscomp.datamodel, seiscomp.core, seiscomp.config
from .helpers import parsers
import datetime
import sys


class sc3(object):
    def _fillSc3(self, obj, att):
        commentNum = 0
        for (k, p) in att.items():
            try:
                if k == 'Comment':
                    # print('DEBUG: Adding comment', p)
                    if p.startswith('Grant'):
                        # 2020: These belong in DOI metadata, not here.
                        continue

                    c = seiscomp.datamodel.Comment()
                    c.setText(p)
                    c.setId(str(commentNum))
                    commentNum += 1
                    obj.add(c)
                    continue

                if k == 'Pid':
                    # print('DEBUG: Adding Pid as comment', p)
                    c = seiscomp.datamodel.Comment()
                    (typ, val) = p.split(':', 1)
                    s = '{"type":"%s", "value":"%s"}' % (typ.upper(), val)
                    c.setText(s)
                    c.setId('FDSNXML:Identifier/' + str(commentNum))
                    commentNum += 1
                    obj.add(c)
                    continue

                w = 'set' + k
                p = self.sc3Valid['attributes'][k]['validator'](p)
                getattr(obj, w)(p)
            except Exception as e:
                print("[Error] %s = %s (%s)" % (k, p, e),
                      file=sys.stderr)

    @staticmethod
    def getBool(val):
        if val == "True" or val == 1:
            return True
        elif val == "False" or val == 0:
            return False
        else:
            raise Exception("Invalid Boolean Value")

    @staticmethod
    def getString(data):
        return data.strip()

    @staticmethod
    def getRealArray(data):
        RA = seiscomp.datamodel.RealArray()
        for r in map(float, data):
            RA.content().push_back(r)
        return RA

    @staticmethod
    def getComplexArray(data):
        CA = seiscomp.datamodel.ComplexArray()
        for (r, i) in data:
            CA.content().push_back(complex(float(r), float(i)))
        return CA

    @staticmethod
    def getDate(value):
        if isinstance(value, datetime.datetime):
            return seiscomp.core.Time(*(value.timetuple()[:6]))
        elif isinstance(value, str):
            value = parsers.parseDate(value)
            return seiscomp.core.Time(*(value.timetuple()[:6]))
        return value

    @staticmethod
    def getBlob(value):
        b = seiscomp.datamodel.Blob()
        b.setContent(value)
        return b

    @staticmethod
    def getStationGroupType(val):
        if val == "ARRAY":
            return seiscomp.datamodel.ARRAY
        elif val == "DEPLOYMENT":
            return seiscomp.datamodel.DEPLOYMENT
        else:
            raise Exception("Invalid station group type")

    @staticmethod
    def _findValidOnes(mode):
        valid = {
            'dataloggerCalibration': {
                'creator': seiscomp.datamodel.DataloggerCalibration,
                'attributes': {
                    'SerialNumber': {'validator': sc3.getString},
                    'Channel': {'validator': int},
                    'Start': {'validator': sc3.getDate},
                    'End': {'validator': sc3.getDate},
                    'Gain': {'validator': float},
                    'GainFrequency': {'validator': float},
                    'Remark': {'validator': sc3.getBlob}
                }
            },
            'sensorCalibration': {
                'creator': seiscomp.datamodel.SensorCalibration,
                'attributes': {
                    'SerialNumber': {'validator': sc3.getString},
                    'Channel': {'validator': int},
                    'Start': {'validator': sc3.getDate},
                    'End': {'validator': sc3.getDate},
                    'Gain': {'validator': float},
                    'GainFrequency': {'validator': float},
                    'Remark': {'validator': sc3.getBlob}
                }
            },
            'channel': {
                'creator': seiscomp.datamodel.Stream_Create,
                'attributes': {
                    'Code': {'validator': sc3.getString},
                    'Start': {'validator': sc3.getDate},
                    'End': {'validator': sc3.getDate},
                    'Datalogger': {'validator': sc3.getString},
                    'DataloggerSerialNumber': {'validator': sc3.getString},
                    'DataloggerChannel': {'validator': int},
                    'Sensor': {'validator': sc3.getString},
                    'SensorSerialNumber': {'validator': sc3.getString},
                    'SensorChannel': {'validator': int},
                    'ClockSerialNumber': {'validator': sc3.getString},
                    'SampleRateNumerator': {'validator': int},
                    'SampleRateDenominator': {'validator': int},
                    'Depth': {'validator': float},
                    'Azimuth': {'validator': float},
                    'Dip': {'validator': float},
                    'Gain': {'validator': float},
                    'GainFrequency': {'validator': float},
                    'GainUnit': {'validator': sc3.getString},
                    'Format': {'validator': sc3.getString},
                    'Flags': {'validator': sc3.getString},
                    'Restricted': {'validator': sc3.getBool},
                    'Shared': {'validator': sc3.getBool}
                }
            },
            'location': {
                'creator': seiscomp.datamodel.SensorLocation_Create,
                'attributes': {
                    'Code': {'validator': sc3.getString},
                    'Start': {'validator': sc3.getDate},
                    'End': {'validator': sc3.getDate},
                    'Latitude': {'validator': float},
                    'Longitude': {'validator': float},
                    'Elevation': {'validator': float}
                }
            },
            'station': {
                'creator': seiscomp.datamodel.Station_Create,
                'attributes': {
                    'Code': {'validator': sc3.getString},
                    'Start': {'validator': sc3.getDate},
                    'End': {'validator': sc3.getDate},
                    'Description': {'validator': sc3.getString},
                    'Latitude': {'validator': float},
                    'Longitude': {'validator': float},
                    'Elevation': {'validator': float},
                    'Place': {'validator': sc3.getString},
                    'Country': {'validator': sc3.getString},
                    'Affiliation': {'validator': sc3.getString},
                    'Type': {'validator': sc3.getString},
                    'ArchiveNetworkCode': {'validator': sc3.getString},
                    'Archive': {'validator': sc3.getString},
                    'Restricted': {'validator': sc3.getBool},
                    'Shared': {'validator': sc3.getBool},
                    'Remark': {'validator': sc3.getBlob}
                }
            },
            'network': {
                'creator': seiscomp.datamodel.Network_Create,
                'attributes': {
                    'Code': {'validator': sc3.getString},
                    'Start': {'validator': sc3.getDate},
                    'End': {'validator': sc3.getDate},
                    'Description': {'validator': sc3.getString},
                    'Institutions': {'validator': sc3.getString},
                    'Region': {'validator': sc3.getString},
                    'Type': {'validator': sc3.getString},
                    'NetClass': {'validator': sc3.getString},
                    'Archive': {'validator': sc3.getString},
                    'Comment': {'validator': sc3.getString},
                    'Pid': {'validator': sc3.getBlob},
                    'Restricted': {'validator': sc3.getBool},
                    'Shared': {'validator': sc3.getBool},
                    'Remark': {'validator': sc3.getBlob}
                }
            },
            'stationGroup': {
                'creator': seiscomp.datamodel.StationGroup_Create,
                'attributes': {
                    'Code': {'validator': sc3.getString},
                    'Start': {'validator': sc3.getDate},
                    'End': {'validator': sc3.getDate},
                    'Description': {'validator': sc3.getString},
                    'Type': {'validator': sc3.getStationGroupType},
                    'Latitude': {'validator': float},
                    'Longitude': {'validator': float},
                    'Elevation': {'validator': float},
                }
            },
            'stationReference': {
                'creator': seiscomp.datamodel.StationReference,
                'attributes': {
                    'StationID': {'validator': sc3.getString},
                }
            },
            'datalogger': {
                'creator': seiscomp.datamodel.Datalogger_Create,
                'attributes': {
                    'Name': {'validator': sc3.getString},
                    'Description': {'validator': sc3.getString},
                    'DigitizerModel': {'validator': sc3.getString},
                    'DigitizerManufacturer': {'validator': sc3.getString},
                    'RecorderModel': {'validator': sc3.getString},
                    'RecorderManufacturer': {'validator': sc3.getString},
                    'ClockModel': {'validator': sc3.getString},
                    'ClockManufacturer': {'validator': sc3.getString},
                    'ClockType': {'validator': sc3.getString},
                    'Gain': {'validator': float},
                    'MaxClockDrift': {'validator': float},
                    'Remark': {'validator': sc3.getBlob}
                }
            },
            'decimation': {
                'creator': seiscomp.datamodel.Decimation,
                'attributes': {
                    'SampleRateNumerator': {'validator': int},
                    'SampleRateDenominator': {'validator': int},
                    'AnalogueFilterChain': {'validator': sc3.getBlob},
                    'DigitalFilterChain': {'validator': sc3.getBlob}
                }
            },
            'fir': {
                'creator': seiscomp.datamodel.ResponseFIR_Create,
                'attributes': {
                    'Name': {'validator': sc3.getString},
                    'Gain': {'validator': float},
                    'DecimationFactor': {'validator': int},
                    'Delay': {'validator': float},
                    'Correction': {'validator': float},
                    'NumberOfCoefficients': {'validator': int},
                    'Symmetry': {'validator': sc3.getString},
                    'Coefficients': {'validator': sc3.getRealArray},
                    'Remarks': {'validator': sc3.getBlob}
                }
            },
            'paz': {
                'creator': seiscomp.datamodel.ResponsePAZ_Create,
                'attributes': {
                    'Name': {'validator': sc3.getString},
                    'Description': {'validator': sc3.getString},
                    'Type': {'validator': sc3.getString},
                    'Gain': {'validator': float},
                    'GainFrequency': {'validator': float},
                    'NormalizationFactor': {'validator': float},
                    'NormalizationFrequency': {'validator': float},
                    'NumberOfZeros': {'validator': int},
                    'NumberOfPoles': {'validator': int},
                    'Zeros': {'validator': sc3.getComplexArray},
                    'Poles': {'validator': sc3.getComplexArray},
                    'Remark': {'validator': sc3.getBlob}
                }
            },
            'sensor': {
                'creator': seiscomp.datamodel.Sensor_Create,
                'attributes': {
                    'Name': {'validator': sc3.getString},
                    'Description': {'validator': sc3.getString},
                    'Model': {'validator': sc3.getString},
                    'Manufacturer': {'validator': sc3.getString},
                    'Type': {'validator': sc3.getString},
                    'Unit': {'validator': sc3.getString},
                    'LowFrequency': {'validator': float},
                    'HighFrequency': {'validator': float},
                    'Response': {'validator': sc3.getString},
                    'Remark': {'validator': sc3.getBlob}
                }
            }
        }

        return valid.get(mode)

    def __init__(self, mode, child=[]):
        self.sc3Mode = mode
        self.sc3obj = None
        self.sc3Valid = sc3._findValidOnes(mode)
        self._sc3Childs = child

    def _create(self):
        if not self.sc3Valid:
            raise Exception("Class without a type defined.")
        return self.sc3Valid['creator']()

    def sc3Att(self):
        """
        This is the heart. You should return a dictionary of attributes to be
        set on the sc3 object. This dictionary will be used by the _fillSc3
        method.
        """
        raise Exception("Not Implemented!")

    def sc3ValidKey(self, key):
        if not self.sc3Valid:
            raise Exception("Class without a type defined.")
        return (key in self.sc3Valid['attributes'])

    def sc3Resolv(self, inventory):
        """
        In this method you should resolve all the references in your
        self object.
        """
        pass

    def sc3Derived(self, inventory):
        """
        This method should generate and collect all the derived objects
        (children in the inventory sense) that should be attributed to the
        self object. By default this virtual method returns an empty array.
        """
        objs = []
        for obj in self._sc3Childs:
            objs.append(obj.sc3Obj(inventory))
        return objs

    def sc3ID(self, inventory):
        obj = self.sc3Obj(inventory)
        return obj.publicID()

    def sc3Obj(self, inventory):
        if not self.sc3obj:
            # Get a new object
            obj = self._create()

            # try to resolve REFERENCES to PUBLIC ID
            self.sc3Resolv(inventory)

            # Add the derived objects in
            for dobj in self.sc3Derived(inventory):
                obj.add(dobj)

            # Fill the Attributes in
            self._fillSc3(obj, self.sc3Att())
            # # Only want to see Networks:
            # if (('Code' in self.sc3Att().keys())
            #     and ('ArchiveNetworkCode' not in self.sc3Att().keys())
            #     and ('Azimuth' not in self.sc3Att().keys())
            #     ):
            #     print('DEBUG basesc3.py: sc3Obj:', self, self.sc3Att())

            # Set as created
            self.sc3obj = obj

        # return the obj
        return self.sc3obj
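[Editor's note] A hedged sketch of how the sc3 base class above is meant to be subclassed: pick a mode known to _findValidOnes, return validated attributes from sc3Att(), and let sc3Obj() build the seiscomp.datamodel object. The Nw class here is invented for illustration (the real subclasses live elsewhere in nettab), and a working SeisComP installation is assumed.

from nettab.basesc3 import sc3

class Nw(sc3):  # hypothetical minimal subclass
    def __init__(self, code, start):
        super(Nw, self).__init__('network')
        self._code = code
        self._start = start

    def sc3Att(self):
        # keys must appear in _findValidOnes('network')['attributes'];
        # each value is run through its validator before setCode()/setStart()
        return {'Code': self._code, 'Start': self._start}

net = Nw('XX', '2024/001')
obj = net.sc3Obj(None)               # no references to resolve, inventory unused
print(obj.publicID(), obj.code())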
506
lib/python/nettab/convertUtils.py
Normal file
@@ -0,0 +1,506 @@
from __future__ import print_function
import sys
import csv
import re
from datetime import datetime

def getFieldNames(fd):
    tmp = fd.readline().split(',')
    fieldNames = []
    for i in tmp:
        fieldNames.append(i.strip())
    return fieldNames

def quote(instr):
    return '"' + instr + '"'

def hummanStr(instr):
    return instr.replace("_", " ")

def parseDate(val):
    if not val or val == "":
        return None
    date = val.replace("/", "-")
    formats = {len("YYYY-JJJ"): "%Y-%j",
               len("YYYY-MM-DD"): "%Y-%m-%d",
               len("YYYY-JJJ:HHMM"): "%Y-%j:%H%M",
               len("YYYY-JJJTHH:MM"): "%Y-%jT%H:%M",
               len("YYYY-MM-DDTHH:MM"): "%Y-%m-%dT%H:%M",
               len("YYYY-JJJTHH:MM:SS"): "%Y-%jT%H:%M:%S",
               len("YYYY-MM-DDTHH:MM:SS"): "%Y-%m-%dT%H:%M:%S"}
    try:
        return datetime.strptime(date, formats[len(date)])
    except Exception as e:
        raise ValueError("invalid date: " + date + str(e))

def formatDate(date):
    if not date:
        return ""

    if date.hour != 0 or date.minute != 0:
        return datetime.strftime(date, "%Y/%j:%H%M")

    return datetime.strftime(date, "%Y/%j")

def isPyVersion(major, minor):
    return sys.version_info[0] == major and \
        sys.version_info[1] == minor

class StationMappings:
    def __init__(self, networkCode, stationList, filename):
        self.networkCode = networkCode
        self.stationList = stationList
        self.stationMapping = {}
        self.stationBreak = {}

        if not filename: return
        _rx_statmap = re.compile(r'\s*([^_]*)_([^=]*)=(\S*)\s*(from=([0-9]+/[0-9]+))?\s*(to=([0-9]+/[0-9]+))?\s*$')
        fd = open(filename)
        stationMapping = {}
        try:
            lineno = 0
            try:
                line = fd.readline()
                lineno = 1
                while line:
                    m = _rx_statmap.match(line)
                    if m is None:
                        raise Exception("parse error")

                    (sta, net, archive_net, from_def, from_year, to_def, to_year) = m.groups()

                    if net != self.networkCode:
                        line = fd.readline()
                        continue

                    if sta not in self.stationList:
                        line = fd.readline()
                        continue

                    try:
                        sta_net = stationMapping[sta]

                    except KeyError:
                        sta_net = []
                        stationMapping[sta] = sta_net

                    if from_def:
                        from_date = parseDate(from_year)

                    else:
                        from_date = None

                    if to_def:
                        to_date = parseDate(to_year)

                    else:
                        to_date = None

                    sta_net.append((from_date, to_date, archive_net))
                    line = fd.readline()
                    lineno += 1

            except (Exception, TypeError, ValueError) as e:
                raise Exception("%s:%d: %s" % (filename, lineno, str(e)))

        finally:
            fd.close()

        if len(stationMapping):
            print("Found %d station mappings" % len(stationMapping), file=sys.stderr)
            self.stationMapping = stationMapping
        else:
            ## print("No station mappings found", file=sys.stderr)
            pass

    def dump(self, fdo, stationCode):
        items = []
        for (code, mapping) in self.stationMapping.items():
            if stationCode and stationCode != code: continue
            items.append(code)
            for (fromDate, toDate, network) in mapping:
                fdo.write("Sa: ArchiveNetworkCode=%s %s" % (network, code))
                if fromDate:
                    fdo.write(" from=%s" % formatDate(fromDate))
                if toDate:
                    fdo.write(" to=%s" % formatDate(toDate))
                fdo.write("\n")

        for code in items:
            self.stationMapping.pop(code)

    def getMappings(self, code, start, end):
        mapping = []

        if (code, start, end) not in self.stationBreak:
            mapping.append([start, end])
        else:
            for (archiveNet, s, e, fr, to) in self.stationBreak[(code, start, end)]:
                mapping.append([s, e])

        return mapping

    def parseStationLine(self, items):
        stationCode = items[0].strip()
        start = parseDate(items[10])

        if len(items) > 11:
            end = parseDate(items[11])
        else:
            end = None

        if stationCode not in self.stationMapping:
            ## print("Skipping %s not in mapping list" % stationCode, file=sys.stderr)
            return self.getMappings(stationCode, start, end)

        for (fDate, tDate, archiveNet) in self.stationMapping[stationCode]:
            if fDate and tDate:
                raise Exception("Unsupported: both 'to' and 'from' definitions found.")
            elif fDate:
                if fDate >= start:
                    if (end and fDate <= end) or not end:
                        ## print("Processing fDate %s %s %s [%s]" % (stationCode, start, end, fDate), file=sys.stderr)
                        if (stationCode, start, end) in self.stationBreak:
                            raise Exception("Crazy multiple station mapping for the same station line")
                        self.stationBreak[(stationCode, start, end)] = []
                        self.stationBreak[(stationCode, start, end)].append((self.networkCode, start, fDate, fDate, tDate))
                        self.stationBreak[(stationCode, start, end)].append((archiveNet, fDate, end, fDate, tDate))
                        ## print(" found mapping From -> %s (%s,%s)" % (fDate, stationCode, formatDate(start)), file=sys.stderr)
                        return self.getMappings(stationCode, start, end)
            elif tDate:
                if tDate >= start:
                    if (end and tDate <= end) or not end:
                        ## print("Processing tDate %s %s %s [%s]" % (stationCode, start, end, tDate), file=sys.stderr)
                        if (stationCode, start, end) in self.stationBreak:
                            raise Exception("Crazy multiple station mapping for the same station line")
                        self.stationBreak[(stationCode, start, end)] = []
                        self.stationBreak[(stationCode, start, end)].append((archiveNet, start, tDate, fDate, tDate))
                        self.stationBreak[(stationCode, start, end)].append((self.networkCode, tDate, end, fDate, tDate))
                        ## print(" found mapping To -> %s (%s,%s)" % (tDate, stationCode, formatDate(start)), file=sys.stderr)
                        return self.getMappings(stationCode, start, end)
            else:
                if (stationCode, start, end) in self.stationBreak:
                    raise Exception("Crazy multiple station mapping for the same station line")
                self.stationBreak[(stationCode, start, end)] = []
                self.stationBreak[(stationCode, start, end)].append((archiveNet, start, end, fDate, tDate))
                ## print(" found mapping ALL (%s,%s)" % (stationCode, formatDate(start)), file=sys.stderr)
                return self.getMappings(stationCode, start, end)
        ## print("Ignored %s" % " ".join(items), file=sys.stderr)
        return self.getMappings(stationCode, start, end)

class StationAttributes:
    def __init__(self, networkCode, stationList, filename):
        self.networkCode = networkCode
        self.stationList = stationList
        self.stationAttributeList = {}

        if not filename: return

        fd = open(filename)
        attributes = {}
        try:
            try:
                fieldNames = None
                if isPyVersion(2, 3):
                    fieldNames = getFieldNames(fd)

                for row in csv.DictReader(fd, fieldNames):
                    net_code = row['net_code']
                    if net_code != self.networkCode: continue

                    sta_code = row['sta_code']
                    if sta_code not in self.stationList: continue

                    start = parseDate(row['start'].strip())

                    if sta_code in attributes:
                        raise Exception("multiple %s found in %s" % (str((net_code, sta_code, row['start'])), filename))

                    del row['net_code']
                    del row['sta_code']
                    del row['start']

                    ## Clean up input
                    for key in ['restricted', 'restricted_exc', 'place', 'country', 'affiliation', 'remark']:
                        row[key] = row[key].strip()
                        if len(row[key]) == 0:
                            del row[key]

                    if 'restricted' in row:
                        row['restricted'] = bool(int(row['restricted']))
                        if not row['restricted']: del (row['restricted'])

                    if row:
                        attributes[sta_code] = row

            except KeyError as e:
                raise Exception("column %s missing in %s" % (str(e), filename))

            except (TypeError, ValueError) as e:
                raise Exception("error reading %s: %s" % (filename, str(e)))

        finally:
            fd.close()
        self.stationAttributeList = self.__build__(attributes)
        print(" loaded attributes for %d stations on network %s (%s)" % (len(self.stationAttributeList), self.networkCode, filename), file=sys.stderr)

    def __build__(self, attributes):
        newat = {}

        if not attributes:
            ## print("no station attributes found for network %s" % self.networkCode, file=sys.stderr)
            return newat

        for (code, row) in attributes.items():
            nr = {}
            for (k, v) in row.items():
                if k == 'country': k = 'Country'
                if k == 'place': k = 'Place'
                if k == 'affiliation': k = 'Affiliation'
                if k == 'remark': k = 'Remark'
                if k == 'restricted': k = 'Restricted'
                nr[k] = v
            if nr:
                newat[code] = nr
        return newat

    def get(self, code):
        if self.stationAttributeList and code in self.stationAttributeList:
            return self.stationAttributeList[code]
        else:
            return None

    def __parseDescription__(self, description):
        affiliation = None
        place = None
        country = None
        description = hummanStr(description)
        hasStation = True if description.find("Station") >= 0 else False

        if hasStation:
            affiliation = description[0:(description.index("Station"))].strip()
            parts = description[description.index("Station") + 7:].strip().split(",")
        else:
            parts = description.split(",")

        if len(parts) > 1:
            country = parts[len(parts) - 1].strip()
            parts = parts[0:(len(parts) - 1)]
            place = ",".join(parts)
        else:
            place = ",".join(parts)

        # print("Country:", country, file=sys.stderr)
        # print("Place:", place, file=sys.stderr)
        # print("Affiliation:", affiliation, file=sys.stderr)

        oui = {}
        if country:
            oui['Country'] = country
        if place:
            oui['Place'] = place
        if affiliation:
            oui['Affiliation'] = affiliation
        return oui

    def reorder_station_attr(self):
        att = {}
        if not self.stationAttributeList:
            return None

        for (code, row) in self.stationAttributeList.items():
            for (k, v) in row.items():
                if k == 'restricted_exc':
                    k = 'Restricted'
                    extra = ',*,' + str(v)
                    v = (not row['Restricted']) if 'Restricted' in row else True
                else:
                    extra = ''

                try:
                    dk = att[k]
                except KeyError:
                    dk = {}
                    att[k] = dk

                try:
                    dv = dk[str(v)]
                except KeyError:
                    dv = []
                    dk[str(v)] = dv

                dv.append(code + extra)
        return att

    def parseStationLine(self, items, fStart=None, fEnd=None):
        stationCode = items[0].strip()
        description = items[1]
        start = parseDate(items[10])
        if stationCode not in self.stationList:
            raise Exception("Station %s not in station list." % stationCode)

        ## Here we can force different start & end values for the line
        if fStart is not None:
            start = fStart

        if fEnd is not None:
            end = fEnd

        oui = None
        at = self.get(stationCode)
        # print(items, at, file=sys.stderr)
        if not at:
            ## print(" Deriving attributes from description %s " % " ".join(items), file=sys.stderr)
            at = self.__parseDescription__(description)
            if at:
                self.stationAttributeList[stationCode] = at
        else:
            for item in ['Affiliation', 'Country', 'Place']:
                if item in at:
                    continue
                if not oui:
                    ## print(" Deriving attribute (%s) from description %s " % (item, " ".join(items)), file=sys.stderr)
                    oui = self.__parseDescription__(description)
                if item in oui:
                    ## print(" Setting attribute (%s) from description for %s = %s" % (item, stationCode, oui[item]), file=sys.stderr)
                    at[item] = oui[item]
                else:
                    ## print(" Empty %s for %s" % (item, stationCode), file=sys.stderr)
                    pass

        country = at['Country'] if 'Country' in at else None
        place = at['Place'] if 'Place' in at else None
        return [place, country]

    def dump(self, fdo, code):
        if not code:
            att = self.reorder_station_attr()
            for (key, v) in att.items():
                if key in ['Country', 'Place']: continue
                for (value, s) in v.items():
                    fdo.write("Sa: %s=%s" % (key, quote(value)))
                    for station in s:
                        fdo.write(" %s" % (station))
                    fdo.write("\n")
        else:
            at = self.get(code)
            if not at: return
            if 'done' in at: return
            at['done'] = 1  # Mark the item as printed
            for (k, v) in at.items():
                extra = ''
                if k in ['done', 'Place', 'Country']: continue
                if k in ['Affiliation']: v = quote(v)

                if k == 'Restricted':
                    extra = ' %s,*,*' % code

                if k == 'restricted_exc':
                    k = 'Restricted'
                    extra = ',*,' + str(v)
                    v = (not at['Restricted']) if 'Restricted' in at else True

                fdo.write("Sa: %s=%s %s%s\n" % (k, v, code, extra))

class NetworkAttributes:
    def __build__(self, row):
        # net_code,start,end,restricted,shared,net_class,type,institutions,region,remark

        attList = {}

        if row['start']:
            self.start = row['start'].strftime("%Y/%j")
            self.startDate = row['start']
            self.hasStart = True

        if row['end']:
            self.end = row['end'].strftime("%Y/%j")
            self.endDate = row['end']
            self.hasEnd = True

        if row['restricted'] != 0:
            attList['Restricted'] = row['restricted']

        if row['shared'] != 1:
            attList['Shared'] = row['shared']

        if row['net_class']:
            attList['NetClass'] = row['net_class'].strip()

        if row['type']:
            attList['Type'] = row['type'].strip()

        if row['institutions']:
            attList['Institutions'] = row['institutions'].strip()

        if row['region']:
            attList['Region'] = row['region'].strip()

        if row['remark']:
            attList['Remark'] = row['remark'].strip()

        self.networkAttributes.update(attList)

    def parseNetworkLine(self, items):
        if len(items) < 4 or len(items) > 6:
            raise Exception("Invalid network line")

        attList = {}
        if items[1] == "none":
            attList['Description'] = hummanStr(items[0])
        else:
            attList['Description'] = "%s (%s)" % (hummanStr(items[0]), items[1])

        self.networkAttributes.update(attList)

    def dump(self, fdo):
        for (k, v) in self.networkAttributes.items():
            if k in ['Description', 'Remark', 'Region', 'Institutions']:
                v = quote(v)
            fdo.write("Na: %s=%s\n" % (k, v))

    def __init__(self, networkCode, filename):
        self.networkCode = networkCode
        self.networkAttributes = {}

        self.start = None
        self.end = None

        self.hasStart = False
        self.hasEnd = False

        if not filename: return
        fd = open(filename)
        try:
            try:
                fieldNames = None
                if isPyVersion(2, 3):
                    fieldNames = getFieldNames(fd)

                for row in csv.DictReader(fd, fieldNames):
                    net_code = row['net_code']
                    if net_code != self.networkCode: continue

                    #del row['net_code']
                    #del row['start']
                    row['start'] = parseDate(row['start'])
                    row['end'] = parseDate(row['end'])
                    row['restricted'] = bool(int(row['restricted']))
                    row['shared'] = bool(int(row['shared']))
                    row['region'] = row['region'].strip()
                    row['remark'] = row['remark'].strip()
                    row['institutions'] = row['institutions'].strip()

                    self.__build__(row)
                    break

            except KeyError as e:
                raise Exception("column %s missing in %s" % (str(e), filename))

            except (TypeError, ValueError) as e:
                raise Exception("error reading %s: %s" % (filename, str(e)))

        finally:
            fd.close()
        print(" found %d attributes for network %s (%s)" % (len(self.networkAttributes), self.networkCode, filename), file=sys.stderr)
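[Editor's note] parseDate() above dispatches on the raw string length, so every supported layout must have a distinct length. A short stdlib-only sketch of what that accepts (sample values invented):

from nettab.convertUtils import parseDate, formatDate

# Length-keyed dispatch: "2024-001" (8 chars) -> "%Y-%j",
# "2024-05-17T12:30:00" (19 chars) -> "%Y-%m-%dT%H:%M:%S", etc.
print(parseDate("2024/001"))              # slashes are normalised to dashes first
print(parseDate("2024-05-17T12:30"))
print(formatDate(parseDate("2024-001")))  # midnight dates round-trip to "2024/001"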
160
lib/python/nettab/helpers.py
Normal file
@@ -0,0 +1,160 @@
import re
from datetime import datetime
import string
from functools import reduce

class parsers(object):

    @staticmethod
    def parseString(val):
        return val.strip()

    @staticmethod
    def _parse_paz(npaz, s):
        _rx_paz = re.compile(r'\s*([0-9]*)\(\s*([^,]+),\s*([^)]+)\)\s*')
        pos = 0
        n = 0
        c = []
        while pos < len(s):
            m = _rx_paz.match(s, pos)
            if m is None:
                raise Exception("error parsing PAZ at '" + s[pos:] + "'")

            try:
                if len(m.group(1)) > 0:
                    x = int(m.group(1))
                else:
                    x = 1

                rv = m.group(2)
                iv = m.group(3)

                float(rv)
                float(iv)

            except ValueError:
                raise Exception("error parsing PAZ at '" + s[pos:] + "'")

            for i in range(0, x):
                c.append((rv, iv))
                i = i

            n += x
            pos = m.end()

        if n != npaz:
            raise Exception("expected %d PAZ, found %d" % (npaz, n))
        return c

    @staticmethod
    def _normalize(num, denom):
        if num > denom:
            (a, b) = (num, denom)
        else:
            (a, b) = (denom, num)

        while b > 1:
            (a, b) = (b, a % b)

        if b == 0:
            return (num / a, denom / a)

        return (num, denom)

    @staticmethod
    def _rational(x):
        sign, mantissa, exponent = x.as_tuple()
        sign = (1, -1)[sign]
        mantissa = sign * reduce(lambda a, b: 10 * a + b, mantissa)
        if exponent < 0:
            return parsers._normalize(mantissa, 10 ** (-exponent))
        else:
            return (mantissa * 10 ** exponent, 1)

    @staticmethod
    def _parseFloat(val, mi=None, ma=None):
        number = float(val)
        if (mi and number < mi) or (ma and number > ma):
            raise Exception("Invalid Range")
        return number

    @staticmethod
    def parseGain(val):
        try:
            return parsers._parseFloat(val, 0.0, None)
        except Exception as e:
            raise Exception("Invalid Gain: %s" % e)

    @staticmethod
    def parseLongitude(val):
        try:
            return parsers._parseFloat(val, -180.0, 180.0)
        except Exception as e:
            raise Exception("Invalid Longitude: %s" % e)

    @staticmethod
    def parseLatitude(val):
        try:
            return parsers._parseFloat(val, -90.0, 90.0)
        except Exception as e:
            raise Exception("Invalid Latitude: %s" % e)

    @staticmethod
    def parseDepth(val):
        # Deepest mine ~ 5000 m
        try:
            return parsers._parseFloat(val, 0.0, 5000)
        except Exception as e:
            raise Exception("Invalid Depth: %s" % e)

    @staticmethod
    def parseElevation(val):
        # Highest Everest ~8500 m
        # Deepest Mariana ~11000 m
        try:
            return parsers._parseFloat(val, -11000, 9000)
        except Exception as e:
            raise Exception("Invalid Elevation: %s" % e)

    @staticmethod
    def parseDate(val):
        date = val.replace("/", "-")
        formats = {len("YYYY-JJJ"): "%Y-%j",
                   len("YYYY-MM-DD"): "%Y-%m-%d",
                   len("YYYY-JJJ:HHMM"): "%Y-%j:%H%M",
                   len("YYYY-JJJTHH:MM"): "%Y-%jT%H:%M",
                   len("YYYY-MM-DDTHH:MM"): "%Y-%m-%dT%H:%M",
                   len("YYYY-JJJTHH:MM:SS"): "%Y-%jT%H:%M:%S",
                   len("YYYY-MM-DDTHH:MM:SS"): "%Y-%m-%dT%H:%M:%S"}
        try:
            return datetime.strptime(date, formats[len(date)])
        except Exception as e:
            raise ValueError("invalid date: " + date + str(e))

    @staticmethod
    def parseLocationCode(val):
        Code = val.strip()
        if len(Code) > 2 or len(re.sub("[A-Z0-9-*?]", "", Code)) > 0:
            raise Exception("wrong code for location: %s" % Code)
        return Code

    @staticmethod
    def parseStationCode(val):
        Code = val.strip()
        if not Code or len(Code) > 5 or len(re.sub("[A-Z0-9*?]", "", Code)) > 0:
            raise Exception("Wrong code for station: %s" % Code)
        return Code

    @staticmethod
    def parseChannelCode(val):
        Code = val.strip()
        if not Code or len(Code) > 3 or len(re.sub("[A-Z0-9*?]", "", Code)) > 0:
            raise Exception("Wrong code for channel: %s" % Code)
        return Code

    @staticmethod
    def parseNetworkCode(val):
        Code = val.strip()
        if not Code or len(Code) > 2 or len(re.sub("[A-Z0-9*?]", "", Code)) > 0:
            raise Exception("Wrong code for network: %s" % Code)
        return Code
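[Editor's note] A small demonstration of the PAZ string format _parse_paz() accepts, inferred directly from its regex: an optional repeat count followed by "(real,imag)" pairs. The pole values below are invented.

from nettab.helpers import parsers

# "2(-0.037,0.037)(-502.0,0.0)" expands to the pair (-0.037, 0.037) twice,
# then (-502.0, 0.0) once; npaz must equal the expanded count.
poles = parsers._parse_paz(3, "2(-0.037,0.037)(-502.0,0.0)")
print(poles)  # [('-0.037', '0.037'), ('-0.037', '0.037'), ('-502.0', '0.0')]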
1119
lib/python/nettab/lineType.py
Normal file
File diff suppressed because it is too large
1645
lib/python/nettab/nettab.py
Normal file
File diff suppressed because it is too large
523
lib/python/nettab/nodesi.py
Normal file
@ -0,0 +1,523 @@
|
||||
from __future__ import print_function
|
||||
from .lineType import Dl, Se, Ff, Pz, Cl
|
||||
from .basesc3 import sc3
|
||||
import sys
|
||||
|
||||
class prefixable(object):
|
||||
def adjust(self, prefix):
|
||||
if prefix:
|
||||
self.id = "%s:%s" % (prefix, self.id)
|
||||
|
||||
class Instruments(object):
|
||||
def __init__(self, prefix=""):
|
||||
self.keys = []
|
||||
self.ses = {}
|
||||
self.dls = {}
|
||||
self.fls = {}
|
||||
self.cls = {}
|
||||
self._sensors = {}
|
||||
self._datalogger = {}
|
||||
self._filters = {}
|
||||
self._Cal = {}
|
||||
self._prefix = prefix
|
||||
|
||||
def sc3Objs(self):
|
||||
objs = []
|
||||
|
||||
for s in list(self._sensors.values()):
|
||||
objs.append(s.sc3Obj(self))
|
||||
|
||||
for s in list(self._datalogger.values()):
|
||||
objs.append(s.sc3Obj(self))
|
||||
|
||||
for s in list(self._filters.values()):
|
||||
objs.append(s.sc3Obj(self))
|
||||
|
||||
return objs
|
||||
|
||||
def add(self, obj):
|
||||
where = None
|
||||
|
||||
if isinstance(obj, Se):
|
||||
where = self.ses
|
||||
elif isinstance(obj, Dl):
|
||||
where = self.dls
|
||||
elif isinstance(obj, Cl):
|
||||
where = self.cls
|
||||
elif isinstance(obj, Ff) or isinstance(obj, Pz):
|
||||
where = self.fls
|
||||
else:
|
||||
raise Exception("Object type %s doesn't fir this class" % type(obj))
|
||||
|
||||
if obj.id in self.keys:
|
||||
raise Exception("Object id %s already exist." % (obj))
|
||||
|
||||
self.keys.append(obj.id)
|
||||
where[obj.id] = obj
|
||||
|
||||
return
|
||||
|
||||
def instrumentId(self, iid, gain):
|
||||
if gain is None:
|
||||
if iid in self.dls:
|
||||
gain = self.dls[iid].gain
|
||||
elif iid in self.ses:
|
||||
gain = self.ses[iid].gain
|
||||
else:
|
||||
raise Exception("Instrument iid not found")
|
||||
|
||||
siid = "%s/g=%s" % (iid, int(float(gain)))
|
||||
return siid
|
||||
|
||||
def loadDataloggerCalibrations(self, dsm, dsn, dch, dsg, start, end, dd):
|
||||
cls = []
|
||||
for cl in self.cls.values():
|
||||
if cl.type != "L": continue
|
||||
if cl.match(dsm, dsn):
|
||||
cls.append(Calibration(cl, dch, start, end))
|
||||
|
||||
if len(cls) == 0:
|
||||
if dsn in self.cls:
|
||||
print("[%s] No calibrations found for serial number %s and model %s " % (dsm, dsn, dsm), file=sys.stderr)
|
||||
return
|
||||
|
||||
diid = self.instrumentId(dsm, dsg)
|
||||
try:
|
||||
datalogger = self._datalogger[diid].sc3Obj(self)
|
||||
if dd != datalogger.publicID():
|
||||
raise Exception("Public Id doesn't match")
|
||||
except:
|
||||
raise Exception("[%s] Could not retrieve datalogger %s" % (dsm, diid))
|
||||
|
||||
for cl in cls:
|
||||
if (dsm, dsn, dch, start, end) in self._Cal:
|
||||
## print >> sys.stderr,"[%s] Skiping calibration channel %s" % (dsm, cl.channel)
|
||||
continue
|
||||
## print >> sys.stderr,"[%s] Adding calibration %s (%s)" % (dsm, cl.channel, dd)
|
||||
datalogger.add(cl.sc3Obj(self))
|
||||
self._Cal[(dsm, dsn, dch, start, end)] = cl
|
||||
|
||||
def loadSensorCalibrations(self, ssm, ssn, sch, ssg, start, end, ss):
|
||||
cls = []
|
||||
for cl in self.cls.values():
|
||||
if cl.type != "S": continue
|
||||
if cl.match(ssm, ssn):
|
||||
cls.append(Calibration(cl, sch, start, end))
|
||||
|
||||
if len(cls) == 0:
|
||||
if ssn in self.cls:
|
||||
print("[%s] No calibrations found for serial number %s and model %s " % (ssm,ssn, ssm), file=sys.stderr)
|
||||
return
|
||||
|
||||
siid = self.instrumentId(ssm, ssg)
|
||||
try:
|
||||
sensor = self._sensors[siid].sc3Obj(self)
|
||||
if ss != sensor.publicID():
|
||||
raise Exception("Public Id doesn't match")
|
||||
except:
|
||||
raise Exception("[%s] Could not retrieve sensor %s" % (ssm, siid))
|
||||
|
||||
for cl in cls:
|
||||
if (ssm, ssn, sch, start, end) in self._Cal:
|
||||
## print >> sys.stderr,"[%s] Skiping calibration channel %s" % (ssm, cl.channel)
|
||||
continue
|
||||
## print >> sys.stderr,"[%s] Adding calibration %s channel %s start %s" % (ssm, ssn, cl.channel, start)
|
||||
sensor.add(cl.sc3Obj(self))
|
||||
self._Cal[(ssm, ssn, sch, start, end)] = cl
|
||||
|
||||
def check(self, networks):
|
||||
error = []
|
||||
|
||||
# Dataloggers check
|
||||
error.append("* Dataloggers:")
|
||||
for dl in self.dls.values():
|
||||
error.extend(dl.check(self))
|
||||
error.append("")
|
||||
|
||||
# Check fir filters
|
||||
error.append("* Filters:")
|
||||
for f in self.fls.values():
|
||||
c = False
|
||||
for dl in self.dls.values():
|
||||
c = c or dl.use(f)
|
||||
if c: break
|
||||
if not c: error.append(" [%s] filter is not used" % f.id)
|
||||
error.append("")
|
||||
|
||||
|
||||
# Check the calibrations
|
||||
error.append("* Calibrations:")
|
||||
for cl in self.cls.values():
|
||||
error.extend(cl.check(self))
|
||||
error.append("")
|
||||
|
||||
|
||||
error.append("* Sensors:")
|
||||
for f in self.ses.values():
|
||||
c = False
|
||||
for network in networks.values():
|
||||
for station in network.stations:
|
||||
for location in station.locations:
|
||||
for channel in location.channels:
|
||||
c = c or channel.use(f)
|
||||
if c: break
|
||||
if c: break
|
||||
if c: break
|
||||
if c: break
|
||||
if not c: error.append(" [%s] sensor is not used" % f.id)
|
||||
error.append("")
|
||||
|
||||
error.append("* Dataloggers:")
|
||||
for f in self.dls.values():
|
||||
c = False
|
||||
for network in networks.values():
|
||||
c = c or network.use(f)
|
||||
if c: break
|
||||
if not c: error.append(" [%s] datalogger is not used" % f.id)
|
||||
error.append("")
|
||||
|
||||
return error
|
||||
|
||||
    def filterType(self, iid):
        if iid not in self.keys:
            raise Exception("[%s] Filter id not found" % iid)

        if iid not in self.fls:
            raise Exception("[%s] Object is not a filter" % iid)

        obj = self.fls[iid]
        if isinstance(obj, Ff):
            fType = 'D'
        elif isinstance(obj, Pz):
            fType = obj.type
        else:
            raise Exception("[%s] Unknown filter type" % iid)

        return fType

    def filterID(self, iid):
        if iid not in self.keys:
            raise Exception("[%s] Filter id not found" % iid)

        if iid not in self.fls:
            raise Exception("[%s] Object is not a filter" % iid)

        if iid not in self._filters:
            obj = self.fls[iid]
            if isinstance(obj, Pz):
                ## print >> sys.stderr," Generating new Filter (PZ): %s %s" % (iid, obj.type)
                newFilter = Paz(obj)
            elif isinstance(obj, Ff):
                ## print >> sys.stderr," Generating new Filter (Fir): %s" % (iid)
                newFilter = Fir(obj)
            newFilter.adjust(self._prefix)
            if newFilter.id != self.prefix(iid):
                raise Exception("Invalid filter created %s" % (iid))
            self._filters[iid] = newFilter

        return self._filters[iid].sc3ID(self)

    def prefix(self, iid):
        if self._prefix:
            iid = "%s:%s" % (self._prefix, iid)
        return iid

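    # A quick hedged example of the prefixing rule above (assumption: the
    # instance was built with instrumentPrefix="gfz"; ids are illustrative):
    #
    #   inv.prefix("Q330/N")   # -> "gfz:Q330/N"
    #   # with no prefix configured the id is returned unchanged
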
    def dataloggerID(self, iid, gain=None):
        if iid not in self.keys:
            raise Exception("Object not found.")

        if iid not in self.dls:
            raise Exception("[%s] Object is not a datalogger" % iid)

        diid = self.instrumentId(iid, gain)

        if diid not in self._datalogger:
            ## print >> sys.stderr,"Generating datalogger %s -> %s" % (iid, diid)
            newDatalogger = Dataloger(self.dls[iid], gain)
            newDatalogger.adjust(self._prefix)
            if newDatalogger.id != self.prefix(diid):
                raise Exception("Invalid datalogger created %s %s" % (iid, diid))
            self._datalogger[diid] = newDatalogger

        return self._datalogger[diid].sc3ID(self)

    def sensorID(self, iid, gain=None):
        if iid not in self.keys:
            raise Exception("Object not found.")

        if iid not in self.ses:
            raise Exception("[%s] Object is not a sensor" % iid)

        diid = self.instrumentId(iid, gain)

        if diid not in self._sensors:
            ## print >> sys.stderr,"Generating Sensor %s -> %s" % (iid, diid)
            newSensor = Sensor(self.ses[iid], gain)
            newSensor.adjust(self._prefix)
            if newSensor.id != self.prefix(diid):
                raise Exception("Invalid sensor created %s %s" % (iid, diid))
            self._sensors[diid] = newSensor

        return self._sensors[diid].sc3ID(self)

    def _findObject(self, objID, where):
        obj = None
        for ob in where.values():
            o = ob.sc3Obj(self)
            if o.publicID() == objID:
                obj = o
                break
        if not obj:
            raise Exception("Object not found: %s" % objID)
        return obj

    def _findCallibration(self, obj, count, serialNumber, channel, start):
        if serialNumber is None:
            return None
        if channel is None:
            return None

        for cal in [obj(i) for i in range(0, count)]:
            if cal.serialNumber() == serialNumber and cal.channel() == channel:
                return cal.gain()
        return None

    def _sensorGain(self, seID, serialNumber, channel, start):
        sensor = self._findObject(seID, self._sensors)
        if not sensor:
            raise Exception("Not found %s" % seID)

        sensorFilter = self._findObject(sensor.response(), self._filters)
        if not sensorFilter:
            raise Exception("Not found %s" % seID)

        gainFrequency = sensorFilter.gainFrequency()
        try:
            gainUnit = sensor.unit()
        except Exception:
            print("[%s] No gain unit supplied" % seID, file=sys.stderr)
            gainUnit = None

        gain = self._findCallibration(sensor.sensorCalibration, sensor.sensorCalibrationCount(), serialNumber, channel, start)
        if gain is not None:
            ## print >> sys.stderr,'[%s] Using sensor gain from calibration %s' % (serialNumber, gain)
            pass
        else:
            gain = sensorFilter.gain()

        return (gain, gainFrequency, gainUnit)

    def _dataloggerGain(self, dtID, serialNumber, channel, Numerator, Denominator, start):
        datalogger = self._findObject(dtID, self._datalogger)
        gain = self._findCallibration(datalogger.dataloggerCalibration, datalogger.dataloggerCalibrationCount(), serialNumber, channel, start)
        if gain is not None:
            ## print >> sys.stderr,'[%s] Using datalogger gain from calibration %s' % (serialNumber, gain)
            pass
        else:
            gain = datalogger.gain()

        decimation = None
        for i in range(0, datalogger.decimationCount()):
            decimation = datalogger.decimation(i)
            if decimation.sampleRateNumerator() == Numerator and decimation.sampleRateDenominator() == Denominator:
                break
            decimation = None

        if not decimation:
            raise Exception("Decimation not found %s/%s" % (Numerator, Denominator))

        af = decimation.analogueFilterChain().content().split()
        df = decimation.digitalFilterChain().content().split()

        # Multiply in the gain of every stage of the analogue and digital chains
        for fiID in af:
            g = self._findObject(fiID, self._filters).gain()
            gain = gain * g

        for fiID in df:
            g = self._findObject(fiID, self._filters).gain()
            gain = gain * g

        return gain

    def getChannelGainAttribute(self, dtID, seID, dtSerialNumber, seSerialNumber, dtChannel, seChannel, Numerator, Denominator, channelStart):
        if not dtID or not seID:
            raise Exception("Empty instruments ID supplied.")

        (sensorGain, sensorFrequency, sensorUnit) = self._sensorGain(seID, seSerialNumber, seChannel, channelStart)
        dataloggerGain = self._dataloggerGain(dtID, dtSerialNumber, dtChannel, Numerator, Denominator, channelStart)

        att = {}
        att['Gain'] = sensorGain * dataloggerGain
        if sensorFrequency is not None:
            att['GainFrequency'] = sensorFrequency
        if sensorUnit is not None:
            att['GainUnit'] = sensorUnit
        return att

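# A minimal sketch of how the pieces above combine (assumption: `inv` is a
# populated Instruments instance and the IDs/serials come from a resolved
# channel). The published channel gain is the sensor gain times the
# datalogger gain, the latter already multiplied by the gain of every stage
# of the matching decimation's analogue and digital filter chains:
#
#   att = inv.getChannelGainAttribute(dtID, seID, dtSerial, seSerial,
#                                     dtChannel, seChannel,
#                                     numerator, denominator, start)
#   # att['Gain'] == sensorGain * dataloggerGain
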
class Paz(sc3, prefixable):
    def __init__(self, pz):
        sc3.__init__(self, 'paz')
        self.id = pz.id
        self.att = pz.getAttributes()

    def sc3Att(self):
        att = {}
        att['Name'] = self.id

        for (key, value) in self.att.items():
            if not self.sc3ValidKey(key) or key in att:
                print(" [%s] [%s] Ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key, value), file=sys.stderr)
                continue
            att[key] = value

        return att

class Sensor(sc3, prefixable):
    def __init__(self, se, gain=None):
        sc3.__init__(self, 'sensor')
        self.baseid = se.id
        self.att = se.getAttributes()

        self.pz = se.generatePz(gain)

        self.id = "%s/g=%s" % (self.baseid, int(float(self.pz.gain)))

    def sc3Resolv(self, inventory):
        try:
            self.att['Response'] = inventory.filterID(self.pz.id)
            ## print >> sys.stderr,"Re-used a sensor pole-zero"
        except Exception:
            inventory.add(self.pz)
            self.att['Response'] = inventory.filterID(self.pz.id)

    def sc3Att(self):
        att = {}

        att['Name'] = self.id
        for (key, value) in self.att.items():
            if not self.sc3ValidKey(key) or key in att:
                print(" [%s] [%s] Ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key, value), file=sys.stderr)
                continue
            att[key] = value

        ## Force the needed description on the sensor
        if 'Description' not in att:
            att['Description'] = self.id

        return att

class Fir(sc3, prefixable):
    def __init__(self, ff):
        sc3.__init__(self, 'fir')
        self.id = ff.id
        self.gain = ff.gain
        self.att = ff.getAttributes()

    def sc3Att(self):
        att = {}
        att['Name'] = self.id

        for (key, value) in self.att.items():
            if not self.sc3ValidKey(key) or key in att:
                print(" [%s] [%s] Ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key, value), file=sys.stderr)
                continue
            att[key] = value
        return att

class Decimation(sc3):
    def __init__(self, numerator, decimator, dl):
        sc3.__init__(self, 'decimation')
        self._numerator = numerator
        self._denominator = decimator
        self.chains = dl.chains[(numerator, decimator)]
        self.att = {}

    def sc3Resolv(self, inventory):
        # Split the stages into the analogue ('A') and digital ('D') chains
        sequence = {}
        sequence['A'] = []
        sequence['D'] = []

        for stage in self.chains:
            sid = inventory.filterID(stage)
            ADtype = inventory.filterType(stage)
            sequence[ADtype].append(sid)

        self.att['AnalogueFilterChain'] = " ".join(sequence['A'])
        self.att['DigitalFilterChain'] = " ".join(sequence['D'])

    def sc3Att(self):
        att = {}
        att['SampleRateNumerator'] = self._numerator
        att['SampleRateDenominator'] = self._denominator
        att.update(self.att)
        return att

class Dataloger(sc3, prefixable):  # (sic) name kept as used by dataloggerID above
    def __init__(self, dl, gain=None):
        dcs = []
        sc3.__init__(self, 'datalogger', dcs)

        if gain:
            self.gain = gain
        else:
            self.gain = dl.gain

        self.att = dl.getAttributes()

        self.id = "%s/g=%s" % (dl.id, int(float(self.gain)))
        self.maxClockDrift = dl.mcld

        if dl.chains:
            for (num, dec) in dl.chains:
                dcs.append(Decimation(num, dec, dl))
            self.dcs = dcs
        else:
            print("[%s] Datalogger %s has no stages." % (self.id, dl), file=sys.stderr)

    def sc3Att(self):
        att = {}
        att['Name'] = self.id
        att['Gain'] = self.gain
        att['MaxClockDrift'] = self.maxClockDrift

        for (key, value) in self.att.items():
            if not self.sc3ValidKey(key) or key in att:
                print(" [%s] [%s] Ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key, value), file=sys.stderr)
                continue
            att[key] = value

        ## Force the needed description on the datalogger
        if 'Description' not in att:
            att['Description'] = self.id

        return att

class Calibration(sc3):
    def __init__(self, cl, channel, start, end):
        if cl.type == "S":
            sc3.__init__(self, "sensorCalibration")
        else:
            sc3.__init__(self, "dataloggerCalibration")

        if channel < 0 or channel >= cl.channelCount:
            raise Exception("Invalid channel for calibration [%s]" % channel)

        self.start = start
        self.end = end
        self.channel = channel
        self.id = cl.id
        self.att = cl.getAttributes(channel)

    def sc3Att(self):
        att = {}
        att['SerialNumber'] = self.id
        att['Start'] = self.start
        if self.end:
            att['End'] = self.end

        for (key, value) in self.att.items():
            if not self.sc3ValidKey(key) or key in att:
                print(" [%s] [%s] Ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key, value), file=sys.stderr)
                continue
            att[key] = value
        return att
489
lib/python/nettab/nodesnslc.py
Normal file
@@ -0,0 +1,489 @@
from __future__ import print_function
from .lineType import Sl, Nw, Sr, Sg
from .nodesi import Instruments
from .basesc3 import sc3
import sys

debug = 0

class DontFit(Exception):
    def __init__(self, message):
        Exception.__init__(self, message)

class nslc(object):
    def __init__(self):
        self.start = None
        self.end = None
        self.code = None

    def __overlap__(self, another):
        if self.end:
            if self.end > another.start:
                if not another.end or self.start < another.end:
                    return True
        else:
            if not another.end or self.start < another.end:
                return True
        return False

    def _span(self):
        return "%s / %s" % (self.start, self.end)

    def sc3Att(self):
        att = {}

        att['Start'] = self.start
        if self.end:
            att['End'] = self.end
        att['Code'] = self.code

        for (key, value) in self.att.items():
            if not self.sc3ValidKey(key) or key in att:
                print("[%s] type %s ignoring attribute %s = %s " % (self.code, self.sc3Mode, key, value), file=sys.stderr)
                continue

            att[key] = value
        return att

def _cmptime(t1, t2):
    if t1 is None and t2 is None:
        return 0
    elif t2 is None or (t1 is not None and t1 < t2):
        return -1
    elif t1 is None or (t2 is not None and t1 > t2):
        return 1
    return 0

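# Sanity sketch of the comparator above: None behaves as an open (infinite)
# end time, so it sorts after any concrete datetime (illustrative values):
#
#   import datetime
#   t = datetime.datetime(2020, 1, 1)
#   assert _cmptime(None, None) == 0
#   assert _cmptime(t, None) == -1   # a concrete time precedes an open end
#   assert _cmptime(None, t) == 1
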
class StationGroup(nslc, sc3):
    def __str__(self):
        return "%s" % (self.code)

    def __init__(self, sg):
        if not isinstance(sg, Sg):
            raise TypeError("StationGroup expects an Sg line")

        self.stationReferences = []
        sc3.__init__(self, 'stationGroup', self.stationReferences)

        self.code = sg.code
        self.start = sg.start
        self.end = sg.end
        self.att = sg.getStationGroupAttributes()
        self.srdata = []

    def __match__(self, sr):
        if not isinstance(sr, Sr):
            return False

        return (_cmptime(sr.start, self.end) <= 0 and _cmptime(sr.end, self.start) >= 0)

    def conflict(self, another):
        if self.code != another.code:
            return False

        if self.end:
            if self.end <= another.start:
                return False
            if another.end and another.end <= self.start:
                return False
        else:
            if another.end and another.end <= self.start:
                return False

        return True

    def Sr(self, sr):
        self.srdata.append((sr.ncode, sr.scode, sr.start, sr.end))

    def sc3Resolv(self, inventory):
        for (ncode, scode, start, end) in self.srdata:
            try:
                for stationID in inventory.resolveStation(ncode, scode, start, end):
                    st = StationReference(self, stationID)
                    self.stationReferences.append(st)
            except Exception as e:
                sys.stderr.write(str(e) + "\n")

class StationReference(sc3):
    def __str__(self):
        return "%s" % (self.att["StationID"])

    def __init__(self, stationGroup, stationID):
        self.stationGroup = stationGroup
        sc3.__init__(self, 'stationReference')

        self.att = { "StationID": stationID }

    def sc3Att(self):
        return self.att

class Network(nslc, sc3):
    def __str__(self):
        return "%s" % (self.code)

    def __init__(self, nw):
        if not isinstance(nw, Nw):
            raise TypeError("Network expects a Nw line")

        self.stations = []
        sc3.__init__(self, 'network', self.stations)

        nslc.__init__(self)
        self.code = nw.code
        self.start = nw.start
        self.end = nw.end
        self.att = nw.getNetworkAttributes()

    def __match__(self, sl):
        if not isinstance(sl, Sl):
            return False

        if sl.start < self.start:
            return False
        if self.end:
            if not sl.end or sl.end > self.end:
                return False
        return True

    def conflict(self, another):
        if self.code != another.code:
            return False

        if self.end:
            if self.end <= another.start:
                return False
            if another.end and another.end <= self.start:
                return False
        else:
            if another.end and another.end <= self.start:
                return False

        return True

    def Sl(self, sl):
        if not self.__match__(sl):
            raise DontFit(" Object doesn't fit this network object.")
        inserted = False
        for sta in self.stations:
            try:
                where = "%s" % (sta._span())
                sta.Sl(sl)
                if debug: print("[%s] inserted at %s -> %s" % (self, where, sta._span()), file=sys.stderr)
                inserted = True
                for other in self.stations:
                    if other is sta: continue
                    if other.conflict(sta):
                        raise Exception("Station conflict with already existing station (%s/%s/%s)" % (other, other.start, other.end))
                break
            except DontFit:
                pass
        if not inserted:
            st = Station(self, sl)
            if debug: print("[%s] created new station %s %s" % (self, st, st._span()), file=sys.stderr)
            for sta in self.stations:
                if sta.conflict(st):
                    raise Exception("Station conflict with already existing station (%s/%s/%s)" % (sta, sta.start, sta.end))
            self.stations.append(st)

    def check(self, i):
        error = []
        for station in self.stations:
            error.extend(station.check(i))
        return error

    def use(self, iid):
        c = False
        for station in self.stations:
            c = c or station.use(iid)
            if c: break
        return c

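# Epoch bookkeeping sketch (assumption: datetime start/end values, end=None
# meaning still open). Two epochs with the same code conflict when their
# spans overlap; an open-ended epoch overlaps everything that starts after
# its own start:
#
#   2010-01-01 .. 2015-01-01  vs  2014-01-01 .. None   -> conflict
#   2010-01-01 .. 2015-01-01  vs  2015-01-01 .. None   -> no conflict
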
class Station(nslc, sc3):
    def __str__(self):
        return "%s.%s" % (self.network.code, self.code)

    def __init__(self, network, sl):
        if not isinstance(sl, Sl):
            raise TypeError("Station expects an Sl line")

        self.locations = []
        self.network = network
        sc3.__init__(self, 'station', self.locations)

        # Load myself as a station
        nslc.__init__(self)
        self.code = sl.code
        self.start = sl.start
        self.end = sl.end
        self.att = sl.getStationAttributes()

        # Parse further to generate my locations
        self.Sl(sl)

    def __match__(self, obj):
        if not isinstance(obj, Sl):
            return False
        # Check code
        if obj.code != self.code:
            return False
        # Attributes
        att = obj.getStationAttributes()
        for at in att:
            # Make sure that all attributes in the Sl-line are here
            if at not in self.att:
                return False
            # And that they match
            if att[at] != self.att[at]:
                return False
        # Make sure that there is no other attribute here that is not on the Sl-line
        for at in self.att:
            if at not in att:
                return False

        return True

    def __adjustTime__(self, sl):
        if sl.start < self.start:
            self.start = sl.start
        if not self.end:
            return
        if sl.end and sl.end < self.end:
            return
        self.end = sl.end

    def conflict(self, another):
        if not isinstance(another, Station):
            raise Exception("Cannot compare myself with %s" % type(another))
        if self.code != another.code:
            return False
        if not self.__overlap__(another):
            return False
        return True

    def use(self, iid):
        c = False
        for location in self.locations:
            c = c or location.use(iid)
            if c: break
        return c

    def check(self, i):
        error = []
        for location in self.locations:
            error.extend(location.check(i))
        return error

    def Sl(self, sl):
        if not self.__match__(sl):
            raise DontFit(" sl doesn't fit this station %s/%s_%s." % (self.code, self.start, self.end))
        # Handle time adjustments
        self.__adjustTime__(sl)
        # Handle locations
        inserted = False
        for loc in self.locations:
            try:
                where = loc._span()
                loc.Sl(sl)
                if debug: print(" [%s] inserted at %s -> %s" % (self, where, loc._span()), file=sys.stderr)
                inserted = True
                for other in self.locations:
                    if other is loc: continue
                    if other.conflict(loc):
                        raise Exception("Location conflict with already existing location")
                break
            except DontFit:
                pass

        if not inserted:
            loc = Location(self, sl)
            if debug: print(" [%s] created new location %s %s" % (self, loc, loc._span()), file=sys.stderr)
            for lc in self.locations:
                if lc.conflict(loc):
                    raise Exception("Location conflict with already existing location")
            self.locations.append(loc)

    def sc3Att(self):
        att = nslc.sc3Att(self)

        ## Make sure that the archive network code and the remark are set
        if 'ArchiveNetworkCode' not in att:
            att['ArchiveNetworkCode'] = self.network.code

        if 'Remark' not in att:
            att['Remark'] = ""
        return att

class Location(nslc, sc3):
    def __str__(self):
        return "%s.%s.%s" % (self.station.network.code, self.station.code, self.code)

    def __init__(self, station, sl):
        if not isinstance(sl, Sl):
            raise TypeError("Location expects an Sl line")
        self.channels = []
        sc3.__init__(self, 'location', self.channels)

        nslc.__init__(self)
        self.station = station
        self.code = sl.location
        self.start = sl.start
        self.end = sl.end
        self.att = sl.getLocationAttributes()
        self.Sl(sl)

    def __adjustTime__(self, sl):
        if sl.start < self.start:
            self.start = sl.start
        if not self.end:
            return
        if sl.end and sl.end < self.end:
            return
        self.end = sl.end

    def __match__(self, obj):
        if not isinstance(obj, Sl):
            return False
        if obj.location != self.code:
            return False
        # Attributes
        att = obj.getLocationAttributes()
        for at in att:
            # Make sure that all attributes in the Sl-line are here
            if at not in self.att:
                return False
            # And that they match
            if att[at] != self.att[at]:
                return False
        # Make sure that there is no other attribute here that is not on the Sl-line
        for at in self.att:
            if at not in att:
                return False
        return True

    def conflict(self, another):
        if not isinstance(another, Location):
            raise Exception("Cannot compare myself with %s" % type(another))
        if self.code != another.code:
            return False
        if not self.__overlap__(another):
            return False
        return True

    def use(self, iid):
        c = False
        for channel in self.channels:
            c = c or channel.use(iid)
            if c: break
        return c

    def check(self, i):
        error = []
        for channel in self.channels:
            error.extend(channel.check(i))
        return error

    def Sl(self, sl):
        if not self.__match__(sl):
            raise DontFit(" This obj doesn't match this Location '%s'" % self.code)

        # Handle time adjustments
        self.__adjustTime__(sl)

        # Create channels
        for code in sl.channels:
            channel = Channel(self, code, sl)
            if debug: print(" [%s] created new channel %s/%s" % (self, channel, channel._span()), file=sys.stderr)
            for echan in self.channels:
                if echan.conflict(channel):
                    raise Exception("[%s] channel %s conflict with already existing channel" % (self, code))
            ## print >> sys.stderr," Channel %s appended at '%s'" % (code, self.code)
            self.channels.append(channel)

class Channel(nslc, sc3):
    def __str__(self):
        return "%s.%s.%s.%s" % (self.location.station.network.code, self.location.station.code, self.location.code, self.code)

    def __init__(self, location, code, sl):
        sc3.__init__(self, 'channel')
        self.location = location

        nslc.__init__(self)
        self.code = code
        self.start = sl.start
        self.end = sl.end
        self.att = sl.getChannelAttributes(self.code)

        ## Bring the instrument gains to the channel level
        self._sensorGain = sl.sensorGain
        self._dataloggerGain = sl.dataloggerGain

    def conflict(self, another):
        if not isinstance(another, Channel):
            raise Exception("Cannot compare myself with %s" % type(another))
        if self.code != another.code:
            return False
        if not self.__overlap__(another):
            return False
        return True

    def use(self, iid):
        if 'Datalogger' in self.att and iid == self.att['Datalogger']: return True
        if 'Sensor' in self.att and iid == self.att['Sensor']: return True
        return False

    def check(self, i):
        good = []

        if not isinstance(i, Instruments):
            raise Exception("Invalid instrument object")

        if self.att['Datalogger'] not in i.keys:
            good.append("no Datalogger")

        if self.att['Sensor'] not in i.keys:
            good.append("no Sensor")

        if good:
            good = [ " [%s] %s" % (self, "/".join(good)) ]

        return good

    def sc3Resolv(self, inventory):
        if not inventory:
            print("[%s] Warning, inventory not supplied" % self.code, file=sys.stderr)
            return

        try:
            ssm = self.att['Sensor']
            ssg = self._sensorGain
            sch = self.att['SensorChannel']
            ssn = self.att.get("SensorSerialNumber")
            # Sensor publicID
            ss = inventory.sensorID(ssm, ssg)
            self.att['Sensor'] = ss

            # Sensor calibration
            inventory.loadSensorCalibrations(ssm, ssn, sch, ssg, self.start, self.end, ss)
        except Exception as e:
            print("[%s] Sensor Resolution Error %s" % (self, e), file=sys.stderr)
            ss = None

        try:
            dsm = self.att['Datalogger']
            dsg = self._dataloggerGain
            dch = self.att['DataloggerChannel']
            dsn = self.att.get('DataloggerSerialNumber')

            dt = inventory.dataloggerID(dsm, dsg)
            self.att['Datalogger'] = dt
            inventory.loadDataloggerCalibrations(dsm, dsn, dch, dsg, self.start, self.end, dt)
        except Exception as e:
            print("[%s] Datalogger Resolution Error %s" % (self, e), file=sys.stderr)
            dt = None

        try:
            up = self.att['SampleRateNumerator']
            down = self.att['SampleRateDenominator']
            self.att.update(inventory.getChannelGainAttribute(dt, ss, dsn, ssn, dch, sch, up, down, self.start))
        except Exception as e:
            print("[%s] Cannot find gain back for the channel: %s" % (self, e), file=sys.stderr)
65
lib/python/nettab/stationResolver.py
Normal file
@@ -0,0 +1,65 @@
import time, datetime

def _cmptime(t1, t2):
    if t1 is None and t2 is None:
        return 0
    elif t2 is None or (t1 is not None and t1 < t2):
        return -1
    elif t1 is None or (t2 is not None and t1 > t2):
        return 1
    return 0

def _time2datetime(t):
    result = datetime.datetime(*time.strptime(t.toString("%Y-%m-%dT%H:%M:00Z"), "%Y-%m-%dT%H:%M:%SZ")[0:6])
    result += datetime.timedelta(microseconds=float(t.toString("%S.%f")) * 1000000)
    return result

class StationResolver(object):
    def __init__(self):
        self.stationMap = {}
        self.initialStations = set()

    def collectStations(self, inventory, initial=False):
        for ni in range(inventory.networkCount()):
            n = inventory.network(ni)
            for si in range(n.stationCount()):
                s = n.station(si)

                try:
                    if initial:
                        self.initialStations.add((n.code(), s.code()))
                    else:
                        self.initialStations.remove((n.code(), s.code()))
                        del self.stationMap[(n.code(), s.code())]
                except KeyError:
                    pass

                try:
                    item = self.stationMap[(n.code(), s.code())]
                except KeyError:
                    item = []
                    self.stationMap[(n.code(), s.code())] = item

                start = _time2datetime(s.start())
                try: end = _time2datetime(s.end())
                except Exception: end = None

                item.append((start, end, s.publicID()))

    def resolveStation(self, ncode, scode, start, end):
        result = set()
        try:
            for (s, e, publicID) in self.stationMap[(ncode, scode)]:
                if _cmptime(start, e) <= 0 and _cmptime(end, s) >= 0:
                    result.add(publicID)
        except KeyError:
            pass

        if not result:
            raise Exception("Station reference %s,%s cannot be resolved" % (ncode, scode))

        return result
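# Minimal usage sketch (assumption: `inv` is a seiscomp.datamodel.Inventory
# already loaded from XML or from the database; "GE"/"APE" are hypothetical
# network/station codes):
#
#   resolver = StationResolver()
#   resolver.collectStations(inv)
#   publicIDs = resolver.resolveStation("GE", "APE", None, None)
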
364
lib/python/nettab/tab.py
Normal file
@@ -0,0 +1,364 @@
from __future__ import print_function
from .lineType import Nw, Sg, Sr, Sl, Sa, Na, Dl, Se, Ff, Pz, Ia, Cl
from .nodesi import Instruments
from .nodesnslc import Network, StationGroup, DontFit
import seiscomp.datamodel, seiscomp.io, seiscomp.client, seiscomp.system
from .stationResolver import StationResolver
import sys
import os
import glob
import re

__VERSION__ = "0.1"

class Tab(object):
    def version(self):
        return __VERSION__

    def __init__(self, instrumentPrefix=None, defaultsFile=None, filterFolder=None, xmlFolder=None, database=None):
        self.i = Instruments(instrumentPrefix)
        self.n = {}
        self.g = {}
        self.sas = []
        self.nas = []
        self.ias = []
        self.stationResolver = StationResolver()

        self._filterFolder = None

        print("Starting tab2inv version %s" % self.version(), file=sys.stderr)

        if not filterFolder:
            print(" Warning, no filter folder supplied.", file=sys.stderr)
        else:
            if not os.path.isdir(filterFolder):
                raise Exception("Filter folder does not exist.")

            self._filterFolder = filterFolder

        if defaultsFile is not None:
            self._defaults(defaultsFile)

        if database is not None:
            self._loadDatabase(database)

        if xmlFolder is not None:
            self._loadXml(xmlFolder)

    def _defaults(self, filename):
        sas = []
        ias = []
        nas = []
        try:
            fd = open(filename)
            print(" Parsing defaults file: %s" % (filename), file=sys.stderr)
            for line in fd:
                line = line.strip()
                if not line or line[0] == "#": continue
                (Type, Content) = line.split(":", 1)
                if Type == "Nw":
                    raise Exception("Defaults file can only contain attributes")
                elif Type == "Na":
                    nas.append(Na(Content))
                elif Type == "Sa":
                    sas.append(Sa(Content))
                elif Type == "Sl":
                    raise Exception("Defaults file can only contain attributes")
                elif Type == "Ia":
                    ias.append(Ia(Content))
                elif Type == "Se":
                    raise Exception("Defaults file can only contain attributes")
                elif Type == "Dl":
                    raise Exception("Defaults file can only contain attributes")
                elif Type == "Cl":
                    raise Exception("Defaults file can only contain attributes")
                elif Type == "Ff":
                    raise Exception("Defaults file can only contain attributes")
                elif Type == "If":
                    raise Exception("Defaults file can only contain attributes")
                elif Type == "Pz":
                    raise Exception("Defaults file can only contain attributes")
                else:
                    print(" Ignored line", line, file=sys.stderr)
            fd.close()
        except Exception as e:
            print(" Warning: %s" % e, file=sys.stderr)

        self.sas = sas
        self.nas = nas
        self.ias = ias

    def _loadDatabase(self, dbUrl):
        m = re.match(r"(?P<dbDriverName>^.*)://(?P<dbAddress>.+?:.+?@.+?/.+$)", dbUrl)
        if not m:
            raise Exception("error in parsing SC3 DB url")

        db = m.groupdict()

        try:
            registry = seiscomp.system.PluginRegistry.Instance()
            registry.addPluginName("dbmysql")
            registry.loadPlugins()
        except Exception:
            raise  # Cannot load database driver

        dbDriver = seiscomp.io.DatabaseInterface.Create(db["dbDriverName"])
        if dbDriver is None:
            raise Exception("Cannot find database driver " + db["dbDriverName"])

        if not dbDriver.connect(db["dbAddress"]):
            raise Exception("Cannot connect to database at " + db["dbAddress"])

        dbQuery = seiscomp.datamodel.DatabaseQuery(dbDriver)
        if dbQuery is None:
            raise Exception("Cannot get DB query object")

        print(" Loading inventory from database ... ", end=' ', file=sys.stderr)
        inventory = seiscomp.datamodel.Inventory()
        dbQuery.loadNetworks(inventory)
        for ni in range(inventory.networkCount()):
            dbQuery.loadStations(inventory.network(ni))
        print("Done.", file=sys.stderr)
        if inventory:
            self.stationResolver.collectStations(inventory, True)

    def _loadXml(self, folder):
        print(" Loading inventory from XML file ... ", end=' ', file=sys.stderr)
        for f in glob.glob(os.path.join(folder, "*.xml")):
            ar = seiscomp.io.XMLArchive()
            ar.open(f)
            inventory = seiscomp.datamodel.Inventory_Cast(ar.readObject())
            ar.close()

            if inventory:
                self.stationResolver.collectStations(inventory)
        print("Done.", file=sys.stderr)

    def digest(self, tabFilename):
        sas = []
        ias = []
        nw = None
        sg = None

        n = None
        g = None
        print(" Parsing file: %s" % (tabFilename), file=sys.stderr)

        if not tabFilename or not os.path.isfile(tabFilename):
            raise Exception("Supplied filename is invalid.")

        if tabFilename in list(self.n.keys()) or tabFilename in list(self.g.keys()):
            raise Exception("File %s is already digested." % tabFilename)
        fd = None
        try:
            fd = open(tabFilename)
            for line in fd:
                obj = None
                line = line.strip()
                if not line or line[0] == "#": continue
                if str(line).find(":") == -1:
                    raise Exception("Invalid line format '%s'" % line)
                (Type, Content) = line.split(":", 1)

                if Type == "Nw":
                    if n or g:
                        raise Exception("Network or Station Group already defined, only one Hr line should be defined per file.")
                    try:
                        nw = Nw(Content)
                    except Exception as e:
                        raise Exception("Error while creating nw from '%s': %s" % (Content, e))
                    try:
                        for na in self.nas: nw.Na(na)  # Defaults
                    except Exception as e:
                        raise Exception("Error while loading (defaults) %s into %s: %s" % (na, nw, e))

                elif Type == "Sg":
                    if n or g:
                        raise Exception("Network or Station Group already defined, only one Hr line should be defined per file.")

                    try:
                        sg = Sg(Content)
                    except Exception as e:
                        raise Exception("Error while creating sg from '%s': %s" % (Content, e))
                    try:
                        for na in self.nas: sg.Na(na)  # Defaults
                    except Exception as e:
                        raise Exception("Error while loading (defaults) %s into %s: %s" % (na, sg, e))

                elif Type == "Na":
                    if not nw and not sg:
                        raise Exception("No network defined, no Na line allowed before a Hr line.")
                    if n or g:
                        raise Exception("No Na lines allowed after a Sl line. Network has already been defined.")
                    try:
                        na = Na(Content)
                    except Exception as e:
                        raise Exception("Error while creating na from '%s': %s" % (Content, e))
                    if nw:
                        try:
                            nw.Na(na)
                        except Exception as e:
                            raise Exception("Error while adding %s to %s: %s" % (na, nw, e))
                    else:
                        try:
                            sg.Na(na)
                        except Exception as e:
                            raise Exception("Error while adding %s to %s: %s" % (na, sg, e))

                elif Type == "Sa":
                    if not nw:
                        raise Exception("No Sa line allowed before a Hr line.")
                    try:
                        sas.append(Sa(Content))
                    except Exception as e:
                        raise Exception("Error while creating Sa from '%s': %s" % (Content, e))

                elif Type == "Sl":
                    if not n:
                        if not nw:
                            raise Exception("No network defined, Hr line should come before station line.")
                        else:
                            n = Network(nw)
                            for (filename, network) in self.n.items():
                                if network.conflict(n):
                                    raise Exception("Network already defined %s (%s)-(%s) by file %s." % (network.code, network.start, network.end, filename))
                    try:
                        sl = Sl(Content)
                    except Exception as e:
                        raise Exception("Error while creating sl from '%s': %s" % (Content, e))
                    # Fill in attributes
                    try:
                        for sa in self.sas: sl.Sa(sa)  # Defaults
                    except Exception as e:
                        raise Exception("Error while loading (default) %s into %s: %s" % (sa, sl, e))
                    try:
                        for sa in sas: sl.Sa(sa)  # Collected
                    except Exception as e:
                        raise Exception("Error while loading %s into %s: %s" % (str(sa), str(sl), e))
                    # Digest by station
                    try:
                        n.Sl(sl)
                    except DontFit:
                        raise Exception("%s does not fit in %s" % (sl, n))
                    except Exception as e:
                        raise Exception("Error while loading %s into %s: %s" % (sl, n, e))

                elif Type == "Sr":
                    if not g:
                        if not sg:
                            raise Exception("No station group defined, Sg line should come before station reference line.")
                        else:
                            g = StationGroup(sg)
                            for (filename, stationGroup) in self.g.items():
                                if stationGroup.conflict(g):
                                    raise Exception("Station group already defined %s (%s)-(%s) by file %s." % (stationGroup.code, stationGroup.start, stationGroup.end, filename))
                            for (filename, network) in self.n.items():
                                if network.conflict(g):
                                    raise Exception("Station group conflict network already defined %s (%s)-(%s) by file %s." % (network.code, network.start, network.end, filename))

                    try:
                        sr = Sr(Content)
                    except Exception as e:
                        raise Exception("Error while creating sr from '%s': %s" % (Content, e))
                    # Digest by station reference
                    try:
                        g.Sr(sr)
                    except DontFit:
                        raise Exception("%s does not fit in %s" % (sr, g))
                    except Exception as e:
                        raise Exception("Error while loading %s into %s: %s" % (sr, g, e))

                elif Type == "Ia":
                    ias.append(Ia(Content))

                elif Type == "Se":
                    obj = Se(Content)

                elif Type == "Dl":
                    obj = Dl(Content)

                elif Type == "Cl":
                    obj = Cl(Content)

                elif Type == "Ff":
                    obj = Ff(self._filterFolder, Content)

                elif Type == "If":
                    obj = Pz(Content, 'D')

                elif Type == "Pz":
                    obj = Pz(Content, 'A')
                else:
                    print(" Ignored line", line, file=sys.stderr)

                ## Process instrument
                if obj:
                    try:
                        for ia in self.ias: obj.Ia(ia)  # Defaults
                    except Exception as e:
                        raise Exception("Error while loading (defaults) %s into %s: %s" % (ia, obj, e))
                    try:
                        for ia in ias: obj.Ia(ia)  # Collected
                    except Exception as e:
                        raise Exception("Error while loading %s into %s: %s" % (ia, obj, e))
                    try:
                        self.i.add(obj)
                    except Exception as e:
                        raise Exception("Error while loading %s into Instruments db: %s" % (obj, e))
                    obj = None

            # Process network
            if n:
                self.n[tabFilename] = n

            # Process station group
            if g:
                self.g[tabFilename] = g

        finally:
            if fd:
                fd.close()

    def check(self):
        # Instrument-only check
        if self.i.keys:
            print("\nChecking Instruments Loaded:\n", file=sys.stderr)
            error = self.i.check(self.n)
            if error:
                for e in error: print(e, file=sys.stderr)
        else:
            print("\nNo instruments loaded", file=sys.stderr)

        # Cross check
        error = []
        if self.n:
            print("\nChecking Networks Loaded:\n", file=sys.stderr)
            for network in self.n.values():
                error.extend(network.check(self.i))
            if error:
                for e in error: print(e, file=sys.stderr)
        else:
            print("\nNo network/stations loaded.", file=sys.stderr)

    def sc3Obj(self, sc3i=None):
        if not sc3i:
            sc3i = seiscomp.datamodel.Inventory()

        for network in list(self.n.values()):
            sc3n = network.sc3Obj(self.i)
            sc3i.add(sc3n)

        for sc3o in self.i.sc3Objs():
            sc3i.add(sc3o)

        self.stationResolver.collectStations(sc3i)

        for stationGroup in list(self.g.values()):
            sc3g = stationGroup.sc3Obj(self.stationResolver)
            sc3i.add(sc3g)

        return sc3i
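# End-to-end usage sketch of this module (assumption: "example.tab" and a
# "filters" folder exist; the XMLArchive calls follow the standard SeisComP
# create/writeObject/close pattern):
#
#   t = Tab(filterFolder="filters")
#   t.digest("example.tab")
#   t.check()
#   ar = seiscomp.io.XMLArchive()
#   ar.create("inventory.xml")
#   ar.writeObject(t.sc3Obj())
#   ar.close()
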
31
lib/python/nettab/test/filters/q330_b100_1
Normal file
@@ -0,0 +1,31 @@
0 1.219929e-16 0.000000e+00
1 3.161921e-10 0.000000e+00
2 -4.314652e-08 0.000000e+00
3 -5.635558e-07 0.000000e+00
4 -1.267008e-04 0.000000e+00
5 3.658144e-03 0.000000e+00
6 1.675314e-04 0.000000e+00
7 -5.404505e-03 0.000000e+00
8 1.278609e-02 0.000000e+00
9 -1.803566e-02 0.000000e+00
10 1.473116e-02 0.000000e+00
11 3.226941e-03 0.000000e+00
12 -3.859694e-02 0.000000e+00
13 8.883527e-02 0.000000e+00
14 -1.482427e-01 0.000000e+00
15 2.177661e-01 0.000000e+00
16 8.099144e-01 0.000000e+00
17 1.245959e-01 0.000000e+00
18 -1.230407e-01 0.000000e+00
19 8.899753e-02 0.000000e+00
20 -4.850157e-02 0.000000e+00
21 1.425912e-02 0.000000e+00
22 6.896391e-03 0.000000e+00
23 -1.444342e-02 0.000000e+00
24 1.242861e-02 0.000000e+00
25 -6.568726e-03 0.000000e+00
26 1.522040e-03 0.000000e+00
27 3.142093e-03 0.000000e+00
28 3.656274e-05 0.000000e+00
29 -2.152995e-06 0.000000e+00
30 -2.597827e-07 0.000000e+00
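The filter files in this test folder are plain three-column tables: tap index, real coefficient, imaginary coefficient (zero in all files shown). A minimal, hypothetical reader sketch in Python (load_fir and the path are illustrative, not part of the library):

    import numpy as np

    def load_fir(path):
        # columns: index, real part, imaginary part; the FIR taps are the real column
        return np.loadtxt(path)[:, 1]

    taps = load_fir("q330_b100_1")  # 31 taps, as in the file above
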
65
lib/python/nettab/test/filters/q330_b100_100
Normal file
@@ -0,0 +1,65 @@
0 1.315493e-11 0.000000e+00
1 1.501065e-04 0.000000e+00
2 1.339681e-02 0.000000e+00
3 1.644292e-01 0.000000e+00
4 5.688094e-01 0.000000e+00
5 5.173835e-01 0.000000e+00
6 -2.608360e-01 0.000000e+00
7 -1.220329e-01 0.000000e+00
8 2.571813e-01 0.000000e+00
9 -2.029026e-01 0.000000e+00
10 7.075881e-02 0.000000e+00
11 3.879666e-02 0.000000e+00
12 -1.143135e-01 0.000000e+00
13 1.354797e-01 0.000000e+00
14 -1.114475e-01 0.000000e+00
15 6.705481e-02 0.000000e+00
16 -1.927124e-02 0.000000e+00
17 -2.093129e-02 0.000000e+00
18 4.768056e-02 0.000000e+00
19 -5.933829e-02 0.000000e+00
20 5.757931e-02 0.000000e+00
21 -4.623331e-02 0.000000e+00
22 2.977715e-02 0.000000e+00
23 -1.248294e-02 0.000000e+00
24 -2.366075e-03 0.000000e+00
25 1.278821e-02 0.000000e+00
26 -1.846982e-02 0.000000e+00
27 1.879725e-02 0.000000e+00
28 -1.713865e-02 0.000000e+00
29 1.278199e-02 0.000000e+00
30 -7.675787e-03 0.000000e+00
31 3.255159e-03 0.000000e+00
32 -8.947563e-05 0.000000e+00
33 -1.778758e-03 0.000000e+00
34 2.596043e-03 0.000000e+00
35 -2.666169e-03 0.000000e+00
36 2.307403e-03 0.000000e+00
37 -1.770516e-03 0.000000e+00
38 1.218643e-03 0.000000e+00
39 -7.460492e-04 0.000000e+00
40 3.921752e-04 0.000000e+00
41 -1.583665e-04 0.000000e+00
42 2.437801e-05 0.000000e+00
43 3.807573e-05 0.000000e+00
44 -5.618048e-05 0.000000e+00
45 5.152771e-05 0.000000e+00
46 -3.856469e-05 0.000000e+00
47 2.530286e-05 0.000000e+00
48 -1.512465e-05 0.000000e+00
49 8.739795e-06 0.000000e+00
50 -4.648117e-06 0.000000e+00
51 1.376276e-06 0.000000e+00
52 7.042064e-07 0.000000e+00
53 2.241873e-07 0.000000e+00
54 -1.251026e-06 0.000000e+00
55 1.066771e-07 0.000000e+00
56 2.642876e-07 0.000000e+00
57 3.226638e-07 0.000000e+00
58 -8.074162e-08 0.000000e+00
59 -1.099048e-07 0.000000e+00
60 -3.325203e-08 0.000000e+00
61 1.388506e-08 0.000000e+00
62 1.056275e-08 0.000000e+00
63 2.577911e-09 0.000000e+00
64 -7.018623e-10 0.000000e+00
67
lib/python/nettab/test/filters/q330_b100_20
Normal file
@@ -0,0 +1,67 @@
0 -3.653417e-17 0.000000e+00
1 3.674881e-08 0.000000e+00
2 -4.270596e-07 0.000000e+00
3 1.145020e-06 0.000000e+00
4 -1.875941e-07 0.000000e+00
5 -3.372737e-07 0.000000e+00
6 2.787469e-06 0.000000e+00
7 -3.744026e-06 0.000000e+00
8 5.411719e-06 0.000000e+00
9 7.473363e-06 0.000000e+00
10 -5.177595e-04 0.000000e+00
11 2.106768e-04 0.000000e+00
12 4.632577e-05 0.000000e+00
13 -6.082222e-04 0.000000e+00
14 1.441747e-03 0.000000e+00
15 -2.406265e-03 0.000000e+00
16 3.225338e-03 0.000000e+00
17 -3.506390e-03 0.000000e+00
18 2.814411e-03 0.000000e+00
19 -7.719714e-04 0.000000e+00
20 -2.805119e-03 0.000000e+00
21 7.778055e-03 0.000000e+00
22 -1.358146e-02 0.000000e+00
23 1.917646e-02 0.000000e+00
24 -2.297035e-02 0.000000e+00
25 2.403979e-02 0.000000e+00
26 -2.209865e-02 0.000000e+00
27 8.607339e-03 0.000000e+00
28 1.175252e-02 0.000000e+00
29 -4.477868e-02 0.000000e+00
30 9.649231e-02 0.000000e+00
31 -1.917548e-01 0.000000e+00
32 5.276523e-01 0.000000e+00
33 7.241670e-01 0.000000e+00
34 -1.569053e-01 0.000000e+00
35 4.425742e-02 0.000000e+00
36 3.141684e-03 0.000000e+00
37 -2.667144e-02 0.000000e+00
38 3.615316e-02 0.000000e+00
39 -3.856867e-02 0.000000e+00
40 3.108417e-02 0.000000e+00
41 -2.352589e-02 0.000000e+00
42 1.532109e-02 0.000000e+00
43 -7.403983e-03 0.000000e+00
44 1.096454e-03 0.000000e+00
45 3.097965e-03 0.000000e+00
46 -5.193199e-03 0.000000e+00
47 5.561311e-03 0.000000e+00
48 -4.761101e-03 0.000000e+00
49 3.382132e-03 0.000000e+00
50 -1.920520e-03 0.000000e+00
51 7.152175e-04 0.000000e+00
52 7.677194e-05 0.000000e+00
53 -4.518973e-04 0.000000e+00
54 5.026997e-04 0.000000e+00
55 -5.650370e-04 0.000000e+00
56 -5.568005e-05 0.000000e+00
57 1.577356e-05 0.000000e+00
58 -1.419847e-06 0.000000e+00
59 8.149094e-07 0.000000e+00
60 6.807946e-07 0.000000e+00
61 -1.252728e-06 0.000000e+00
62 1.524350e-06 0.000000e+00
63 -2.833359e-07 0.000000e+00
64 -1.063838e-08 0.000000e+00
65 1.257120e-09 0.000000e+00
66 -5.429542e-11 0.000000e+00
39
lib/python/nettab/test/filters/q330_b100_40
Normal file
@@ -0,0 +1,39 @@
0 4.189518e-13 0.000000e+00
1 3.303176e-04 0.000000e+00
2 1.029213e-03 0.000000e+00
3 -3.141228e-03 0.000000e+00
4 2.057093e-04 0.000000e+00
5 1.525213e-03 0.000000e+00
6 -6.231927e-03 0.000000e+00
7 1.048013e-02 0.000000e+00
8 -1.312025e-02 0.000000e+00
9 1.078214e-02 0.000000e+00
10 -1.444550e-03 0.000000e+00
11 -1.587295e-02 0.000000e+00
12 3.950740e-02 0.000000e+00
13 -6.510363e-02 0.000000e+00
14 8.537156e-02 0.000000e+00
15 -8.919134e-02 0.000000e+00
16 5.006189e-02 0.000000e+00
17 8.372328e-01 0.000000e+00
18 2.667231e-01 0.000000e+00
19 -1.666931e-01 0.000000e+00
20 9.528399e-02 0.000000e+00
21 -5.092177e-02 0.000000e+00
22 1.614584e-02 0.000000e+00
23 7.063624e-03 0.000000e+00
24 -1.838771e-02 0.000000e+00
25 1.994141e-02 0.000000e+00
26 -1.548951e-02 0.000000e+00
27 8.527354e-03 0.000000e+00
28 -2.557887e-03 0.000000e+00
29 -1.811026e-03 0.000000e+00
30 2.426493e-03 0.000000e+00
31 -3.757695e-03 0.000000e+00
32 4.672927e-04 0.000000e+00
33 6.330721e-04 0.000000e+00
34 -1.568741e-06 0.000000e+00
35 -1.254798e-05 0.000000e+00
36 3.210405e-07 0.000000e+00
37 -2.633241e-08 0.000000e+00
38 -5.099975e-08 0.000000e+00
81
lib/python/nettab/test/filters/q330_b100_50
Normal file
@@ -0,0 +1,81 @@
0 6.915055e-16 0.000000e+00
1 9.981469e-07 0.000000e+00
2 8.986285e-05 0.000000e+00
3 3.536859e-04 0.000000e+00
4 -3.196747e-04 0.000000e+00
5 2.398310e-04 0.000000e+00
6 4.343304e-05 0.000000e+00
7 -6.140379e-04 0.000000e+00
8 1.450240e-03 0.000000e+00
9 -2.414179e-03 0.000000e+00
10 3.243791e-03 0.000000e+00
11 -3.565280e-03 0.000000e+00
12 2.956281e-03 0.000000e+00
13 -1.048729e-03 0.000000e+00
14 -2.353488e-03 0.000000e+00
15 7.146584e-03 0.000000e+00
16 -1.283558e-02 0.000000e+00
17 1.849560e-02 0.000000e+00
18 -2.280356e-02 0.000000e+00
19 2.414348e-02 0.000000e+00
20 -2.075420e-02 0.000000e+00
21 1.085375e-02 0.000000e+00
22 7.376841e-03 0.000000e+00
23 -3.628054e-02 0.000000e+00
24 8.073029e-02 0.000000e+00
25 -1.563791e-01 0.000000e+00
26 5.966318e-01 0.000000e+00
27 6.616155e-01 0.000000e+00
28 -1.985033e-01 0.000000e+00
29 5.962802e-02 0.000000e+00
30 -1.201563e-02 0.000000e+00
31 -2.031269e-02 0.000000e+00
32 3.489734e-02 0.000000e+00
33 -3.783039e-02 0.000000e+00
34 3.414802e-02 0.000000e+00
35 -2.681871e-02 0.000000e+00
36 1.805448e-02 0.000000e+00
37 -9.684112e-03 0.000000e+00
38 1.924548e-03 0.000000e+00
39 2.270220e-03 0.000000e+00
40 -4.929948e-03 0.000000e+00
41 5.783542e-03 0.000000e+00
42 -5.278113e-03 0.000000e+00
43 4.012361e-03 0.000000e+00
44 -2.512171e-03 0.000000e+00
45 1.166119e-03 0.000000e+00
46 -1.915292e-04 0.000000e+00
47 -3.549948e-04 0.000000e+00
48 5.355819e-04 0.000000e+00
49 -4.810171e-04 0.000000e+00
50 4.186318e-04 0.000000e+00
51 7.809605e-05 0.000000e+00
52 -5.470072e-06 0.000000e+00
53 -2.123757e-06 0.000000e+00
54 -6.620526e-07 0.000000e+00
55 7.238966e-07 0.000000e+00
56 1.013226e-06 0.000000e+00
57 -1.929203e-06 0.000000e+00
58 7.801228e-07 0.000000e+00
59 -7.887565e-07 0.000000e+00
60 5.818626e-07 0.000000e+00
61 3.221050e-08 0.000000e+00
62 -1.076378e-07 0.000000e+00
63 1.999555e-08 0.000000e+00
64 -7.052141e-08 0.000000e+00
65 -1.357645e-08 0.000000e+00
66 -3.311185e-08 0.000000e+00
67 1.552117e-08 0.000000e+00
68 -5.395556e-09 0.000000e+00
69 7.791274e-09 0.000000e+00
70 2.075919e-10 0.000000e+00
71 -9.326780e-10 0.000000e+00
72 1.850689e-09 0.000000e+00
73 -1.973863e-09 0.000000e+00
74 1.334281e-09 0.000000e+00
75 -6.315467e-10 0.000000e+00
76 6.994718e-11 0.000000e+00
77 1.148694e-10 0.000000e+00
78 -5.595614e-11 0.000000e+00
79 5.760568e-12 0.000000e+00
80 -5.489862e-12 0.000000e+00
400
lib/python/nettab/test/filters/scp_deci10.1
Normal file
@@ -0,0 +1,400 @@
0 -1.280410E-09 0.000000E+00
1 9.089140E-09 0.000000E+00
2 2.857200E-08 0.000000E+00
3 7.068940E-08 0.000000E+00
4 1.503850E-07 0.000000E+00
5 2.898420E-07 0.000000E+00
6 5.199920E-07 0.000000E+00
7 8.824160E-07 0.000000E+00
8 1.431250E-06 0.000000E+00
9 2.234920E-06 0.000000E+00
10 3.377490E-06 0.000000E+00
11 4.959500E-06 0.000000E+00
12 7.097790E-06 0.000000E+00
13 9.924440E-06 0.000000E+00
14 1.358420E-05 0.000000E+00
15 1.823040E-05 0.000000E+00
16 2.401920E-05 0.000000E+00
17 3.110180E-05 0.000000E+00
18 3.961540E-05 0.000000E+00
19 4.967160E-05 0.000000E+00
20 6.134480E-05 0.000000E+00
21 7.465790E-05 0.000000E+00
22 8.956970E-05 0.000000E+00
23 1.059620E-04 0.000000E+00
24 1.236260E-04 0.000000E+00
25 1.422580E-04 0.000000E+00
26 1.614470E-04 0.000000E+00
27 1.806800E-04 0.000000E+00
28 1.993440E-04 0.000000E+00
29 2.167350E-04 0.000000E+00
30 2.320800E-04 0.000000E+00
31 2.445590E-04 0.000000E+00
32 2.533370E-04 0.000000E+00
33 2.576020E-04 0.000000E+00
34 2.566110E-04 0.000000E+00
35 2.497330E-04 0.000000E+00
36 2.364990E-04 0.000000E+00
37 2.166500E-04 0.000000E+00
38 1.901760E-04 0.000000E+00
39 1.573550E-04 0.000000E+00
40 1.187790E-04 0.000000E+00
41 7.536150E-05 0.000000E+00
42 2.833800E-05 0.000000E+00
43 -2.075750E-05 0.000000E+00
44 -7.013260E-05 0.000000E+00
45 -1.177970E-04 0.000000E+00
46 -1.616380E-04 0.000000E+00
47 -1.995190E-04 0.000000E+00
48 -2.293810E-04 0.000000E+00
49 -2.493630E-04 0.000000E+00
50 -2.579120E-04 0.000000E+00
51 -2.539050E-04 0.000000E+00
52 -2.367430E-04 0.000000E+00
53 -2.064400E-04 0.000000E+00
54 -1.636770E-04 0.000000E+00
55 -1.098340E-04 0.000000E+00
56 -4.697750E-05 0.000000E+00
57 2.218660E-05 0.000000E+00
58 9.440430E-05 0.000000E+00
59 1.660030E-04 0.000000E+00
60 2.330560E-04 0.000000E+00
61 2.915810E-04 0.000000E+00
62 3.377580E-04 0.000000E+00
63 3.681570E-04 0.000000E+00
64 3.799620E-04 0.000000E+00
65 3.711900E-04 0.000000E+00
66 3.408650E-04 0.000000E+00
67 2.891620E-04 0.000000E+00
68 2.174900E-04 0.000000E+00
69 1.285060E-04 0.000000E+00
70 2.606830E-05 0.000000E+00
71 -8.490010E-05 0.000000E+00
72 -1.986100E-04 0.000000E+00
73 -3.086790E-04 0.000000E+00
74 -4.084630E-04 0.000000E+00
75 -4.914240E-04 0.000000E+00
76 -5.515290E-04 0.000000E+00
77 -5.836450E-04 0.000000E+00
78 -5.839130E-04 0.000000E+00
79 -5.500750E-04 0.000000E+00
80 -4.817300E-04 0.000000E+00
81 -3.804970E-04 0.000000E+00
82 -2.500650E-04 0.000000E+00
83 -9.613190E-05 0.000000E+00
84 7.379770E-05 0.000000E+00
85 2.507300E-04 0.000000E+00
86 4.246150E-04 0.000000E+00
87 5.848830E-04 0.000000E+00
88 7.210410E-04 0.000000E+00
89 8.233180E-04 0.000000E+00
90 8.833110E-04 0.000000E+00
91 8.945860E-04 0.000000E+00
92 8.532140E-04 0.000000E+00
93 7.581840E-04 0.000000E+00
94 6.116610E-04 0.000000E+00
95 4.190820E-04 0.000000E+00
96 1.890410E-04 0.000000E+00
97 -6.701870E-05 0.000000E+00
98 -3.353110E-04 0.000000E+00
99 -6.003940E-04 0.000000E+00
100 -8.460070E-04 0.000000E+00
101 -1.056010E-03 0.000000E+00
102 -1.215390E-03 0.000000E+00
103 -1.311250E-03 0.000000E+00
104 -1.333740E-03 0.000000E+00
105 -1.276860E-03 0.000000E+00
106 -1.139110E-03 0.000000E+00
107 -9.238090E-04 0.000000E+00
108 -6.392740E-04 0.000000E+00
109 -2.985730E-04 0.000000E+00
110 8.095210E-05 0.000000E+00
111 4.784920E-04 0.000000E+00
112 8.708350E-04 0.000000E+00
113 1.233650E-03 0.000000E+00
114 1.542910E-03 0.000000E+00
115 1.776410E-03 0.000000E+00
116 1.915250E-03 0.000000E+00
117 1.945200E-03 0.000000E+00
118 1.857870E-03 0.000000E+00
119 1.651590E-03 0.000000E+00
120 1.331930E-03 0.000000E+00
121 9.117790E-04 0.000000E+00
122 4.110140E-04 0.000000E+00
123 -1.443240E-04 0.000000E+00
124 -7.232630E-04 0.000000E+00
125 -1.291520E-03 0.000000E+00
126 -1.813440E-03 0.000000E+00
127 -2.254090E-03 0.000000E+00
128 -2.581490E-03 0.000000E+00
129 -2.768760E-03 0.000000E+00
130 -2.796120E-03 0.000000E+00
131 -2.652470E-03 0.000000E+00
132 -2.336640E-03 0.000000E+00
133 -1.858050E-03 0.000000E+00
134 -1.236750E-03 0.000000E+00
135 -5.027860E-04 0.000000E+00
136 3.050470E-04 0.000000E+00
137 1.141090E-03 0.000000E+00
138 1.955230E-03 0.000000E+00
139 2.695760E-03 0.000000E+00
140 3.312460E-03 0.000000E+00
141 3.759760E-03 0.000000E+00
142 3.999910E-03 0.000000E+00
143 4.005660E-03 0.000000E+00
144 3.762670E-03 0.000000E+00
145 3.271090E-03 0.000000E+00
146 2.546440E-03 0.000000E+00
147 1.619580E-03 0.000000E+00
148 5.357070E-04 0.000000E+00
149 -6.475150E-04 0.000000E+00
150 -1.862780E-03 0.000000E+00
151 -3.036670E-03 0.000000E+00
152 -4.093770E-03 0.000000E+00
153 -4.961150E-03 0.000000E+00
154 -5.573010E-03 0.000000E+00
155 -5.875080E-03 0.000000E+00
156 -5.828670E-03 0.000000E+00
157 -5.414010E-03 0.000000E+00
158 -4.632620E-03 0.000000E+00
159 -3.508570E-03 0.000000E+00
160 -2.088510E-03 0.000000E+00
161 -4.402630E-04 0.000000E+00
162 1.349800E-03 0.000000E+00
163 3.180770E-03 0.000000E+00
164 4.942220E-03 0.000000E+00
165 6.520130E-03 0.000000E+00
166 7.803440E-03 0.000000E+00
167 8.690760E-03 0.000000E+00
168 9.097010E-03 0.000000E+00
169 8.959570E-03 0.000000E+00
170 8.243470E-03 0.000000E+00
171 6.945480E-03 0.000000E+00
172 5.096570E-03 0.000000E+00
173 2.762750E-03 0.000000E+00
174 4.398920E-05 0.000000E+00
175 -2.928690E-03 0.000000E+00
176 -5.998030E-03 0.000000E+00
177 -8.986910E-03 0.000000E+00
178 -1.170620E-02 0.000000E+00
179 -1.396360E-02 0.000000E+00
180 -1.557300E-02 0.000000E+00
181 -1.636440E-02 0.000000E+00
182 -1.619300E-02 0.000000E+00
183 -1.494760E-02 0.000000E+00
184 -1.255800E-02 0.000000E+00
185 -9.000540E-03 0.000000E+00
186 -4.301130E-03 0.000000E+00
187 1.463060E-03 0.000000E+00
188 8.165080E-03 0.000000E+00
189 1.563180E-02 0.000000E+00
190 2.364960E-02 0.000000E+00
191 3.197290E-02 0.000000E+00
192 4.033310E-02 0.000000E+00
193 4.845020E-02 0.000000E+00
194 5.604420E-02 0.000000E+00
195 6.284710E-02 0.000000E+00
196 6.861480E-02 0.000000E+00
197 7.313740E-02 0.000000E+00
198 7.624880E-02 0.000000E+00
199 7.783390E-02 0.000000E+00
200 7.783390E-02 0.000000E+00
201 7.624880E-02 0.000000E+00
202 7.313740E-02 0.000000E+00
203 6.861480E-02 0.000000E+00
204 6.284710E-02 0.000000E+00
205 5.604420E-02 0.000000E+00
206 4.845020E-02 0.000000E+00
207 4.033310E-02 0.000000E+00
208 3.197290E-02 0.000000E+00
209 2.364960E-02 0.000000E+00
210 1.563180E-02 0.000000E+00
211 8.165080E-03 0.000000E+00
212 1.463060E-03 0.000000E+00
213 -4.301130E-03 0.000000E+00
214 -9.000540E-03 0.000000E+00
215 -1.255800E-02 0.000000E+00
216 -1.494760E-02 0.000000E+00
217 -1.619300E-02 0.000000E+00
218 -1.636440E-02 0.000000E+00
219 -1.557300E-02 0.000000E+00
220 -1.396360E-02 0.000000E+00
221 -1.170620E-02 0.000000E+00
222 -8.986910E-03 0.000000E+00
223 -5.998030E-03 0.000000E+00
224 -2.928690E-03 0.000000E+00
225 4.398920E-05 0.000000E+00
226 2.762750E-03 0.000000E+00
227 5.096570E-03 0.000000E+00
228 6.945480E-03 0.000000E+00
229 8.243470E-03 0.000000E+00
230 8.959570E-03 0.000000E+00
231 9.097010E-03 0.000000E+00
232 8.690760E-03 0.000000E+00
233 7.803440E-03 0.000000E+00
234 6.520130E-03 0.000000E+00
235 4.942220E-03 0.000000E+00
236 3.180770E-03 0.000000E+00
237 1.349800E-03 0.000000E+00
238 -4.402630E-04 0.000000E+00
239 -2.088510E-03 0.000000E+00
240 -3.508570E-03 0.000000E+00
241 -4.632620E-03 0.000000E+00
|
||||
242 -5.414010E-03 0.000000E+00
|
||||
243 -5.828670E-03 0.000000E+00
|
||||
244 -5.875080E-03 0.000000E+00
|
||||
245 -5.573010E-03 0.000000E+00
|
||||
246 -4.961150E-03 0.000000E+00
|
||||
247 -4.093770E-03 0.000000E+00
|
||||
248 -3.036670E-03 0.000000E+00
|
||||
249 -1.862780E-03 0.000000E+00
|
||||
250 -6.475150E-04 0.000000E+00
|
||||
251 5.357070E-04 0.000000E+00
|
||||
252 1.619580E-03 0.000000E+00
|
||||
253 2.546440E-03 0.000000E+00
|
||||
254 3.271090E-03 0.000000E+00
|
||||
255 3.762670E-03 0.000000E+00
|
||||
256 4.005660E-03 0.000000E+00
|
||||
257 3.999910E-03 0.000000E+00
|
||||
258 3.759760E-03 0.000000E+00
|
||||
259 3.312460E-03 0.000000E+00
|
||||
260 2.695760E-03 0.000000E+00
|
||||
261 1.955230E-03 0.000000E+00
|
||||
262 1.141090E-03 0.000000E+00
|
||||
263 3.050470E-04 0.000000E+00
|
||||
264 -5.027860E-04 0.000000E+00
|
||||
265 -1.236750E-03 0.000000E+00
|
||||
266 -1.858050E-03 0.000000E+00
|
||||
267 -2.336640E-03 0.000000E+00
|
||||
268 -2.652470E-03 0.000000E+00
|
||||
269 -2.796120E-03 0.000000E+00
|
||||
270 -2.768760E-03 0.000000E+00
|
||||
271 -2.581490E-03 0.000000E+00
|
||||
272 -2.254090E-03 0.000000E+00
|
||||
273 -1.813440E-03 0.000000E+00
|
||||
274 -1.291520E-03 0.000000E+00
|
||||
275 -7.232630E-04 0.000000E+00
|
||||
276 -1.443240E-04 0.000000E+00
|
||||
277 4.110140E-04 0.000000E+00
|
||||
278 9.117790E-04 0.000000E+00
|
||||
279 1.331930E-03 0.000000E+00
|
||||
280 1.651590E-03 0.000000E+00
|
||||
281 1.857870E-03 0.000000E+00
|
||||
282 1.945200E-03 0.000000E+00
|
||||
283 1.915250E-03 0.000000E+00
|
||||
284 1.776410E-03 0.000000E+00
|
||||
285 1.542910E-03 0.000000E+00
|
||||
286 1.233650E-03 0.000000E+00
|
||||
287 8.708350E-04 0.000000E+00
|
||||
288 4.784920E-04 0.000000E+00
|
||||
289 8.095210E-05 0.000000E+00
|
||||
290 -2.985730E-04 0.000000E+00
|
||||
291 -6.392740E-04 0.000000E+00
|
||||
292 -9.238090E-04 0.000000E+00
|
||||
293 -1.139110E-03 0.000000E+00
|
||||
294 -1.276860E-03 0.000000E+00
|
||||
295 -1.333740E-03 0.000000E+00
|
||||
296 -1.311250E-03 0.000000E+00
|
||||
297 -1.215390E-03 0.000000E+00
|
||||
298 -1.056010E-03 0.000000E+00
|
||||
299 -8.460070E-04 0.000000E+00
|
||||
300 -6.003940E-04 0.000000E+00
|
||||
301 -3.353110E-04 0.000000E+00
|
||||
302 -6.701870E-05 0.000000E+00
|
||||
303 1.890410E-04 0.000000E+00
|
||||
304 4.190820E-04 0.000000E+00
|
||||
305 6.116610E-04 0.000000E+00
|
||||
306 7.581840E-04 0.000000E+00
|
||||
307 8.532140E-04 0.000000E+00
|
||||
308 8.945860E-04 0.000000E+00
|
||||
309 8.833110E-04 0.000000E+00
|
||||
310 8.233180E-04 0.000000E+00
|
||||
311 7.210410E-04 0.000000E+00
|
||||
312 5.848830E-04 0.000000E+00
|
||||
313 4.246150E-04 0.000000E+00
|
||||
314 2.507300E-04 0.000000E+00
|
||||
315 7.379770E-05 0.000000E+00
|
||||
316 -9.613190E-05 0.000000E+00
|
||||
317 -2.500650E-04 0.000000E+00
|
||||
318 -3.804970E-04 0.000000E+00
|
||||
319 -4.817300E-04 0.000000E+00
|
||||
320 -5.500750E-04 0.000000E+00
|
||||
321 -5.839130E-04 0.000000E+00
|
||||
322 -5.836450E-04 0.000000E+00
|
||||
323 -5.515290E-04 0.000000E+00
|
||||
324 -4.914240E-04 0.000000E+00
|
||||
325 -4.084630E-04 0.000000E+00
|
||||
326 -3.086790E-04 0.000000E+00
|
||||
327 -1.986100E-04 0.000000E+00
|
||||
328 -8.490010E-05 0.000000E+00
|
||||
329 2.606830E-05 0.000000E+00
|
||||
330 1.285060E-04 0.000000E+00
|
||||
331 2.174900E-04 0.000000E+00
|
||||
332 2.891620E-04 0.000000E+00
|
||||
333 3.408650E-04 0.000000E+00
|
||||
334 3.711900E-04 0.000000E+00
|
||||
335 3.799620E-04 0.000000E+00
|
||||
336 3.681570E-04 0.000000E+00
|
||||
337 3.377580E-04 0.000000E+00
|
||||
338 2.915810E-04 0.000000E+00
|
||||
339 2.330560E-04 0.000000E+00
|
||||
340 1.660030E-04 0.000000E+00
|
||||
341 9.440430E-05 0.000000E+00
|
||||
342 2.218660E-05 0.000000E+00
|
||||
343 -4.697750E-05 0.000000E+00
|
||||
344 -1.098340E-04 0.000000E+00
|
||||
345 -1.636770E-04 0.000000E+00
|
||||
346 -2.064400E-04 0.000000E+00
|
||||
347 -2.367430E-04 0.000000E+00
|
||||
348 -2.539050E-04 0.000000E+00
|
||||
349 -2.579120E-04 0.000000E+00
|
||||
350 -2.493630E-04 0.000000E+00
|
||||
351 -2.293810E-04 0.000000E+00
|
||||
352 -1.995190E-04 0.000000E+00
|
||||
353 -1.616380E-04 0.000000E+00
|
||||
354 -1.177970E-04 0.000000E+00
|
||||
355 -7.013260E-05 0.000000E+00
|
||||
356 -2.075750E-05 0.000000E+00
|
||||
357 2.833800E-05 0.000000E+00
|
||||
358 7.536150E-05 0.000000E+00
|
||||
359 1.187790E-04 0.000000E+00
|
||||
360 1.573550E-04 0.000000E+00
|
||||
361 1.901760E-04 0.000000E+00
|
||||
362 2.166500E-04 0.000000E+00
|
||||
363 2.364990E-04 0.000000E+00
|
||||
364 2.497330E-04 0.000000E+00
|
||||
365 2.566110E-04 0.000000E+00
|
||||
366 2.576020E-04 0.000000E+00
|
||||
367 2.533370E-04 0.000000E+00
|
||||
368 2.445590E-04 0.000000E+00
|
||||
369 2.320800E-04 0.000000E+00
|
||||
370 2.167350E-04 0.000000E+00
|
||||
371 1.993440E-04 0.000000E+00
|
||||
372 1.806800E-04 0.000000E+00
|
||||
373 1.614470E-04 0.000000E+00
|
||||
374 1.422580E-04 0.000000E+00
|
||||
375 1.236260E-04 0.000000E+00
|
||||
376 1.059620E-04 0.000000E+00
|
||||
377 8.956970E-05 0.000000E+00
|
||||
378 7.465790E-05 0.000000E+00
|
||||
379 6.134480E-05 0.000000E+00
|
||||
380 4.967160E-05 0.000000E+00
|
||||
381 3.961540E-05 0.000000E+00
|
||||
382 3.110180E-05 0.000000E+00
|
||||
383 2.401920E-05 0.000000E+00
|
||||
384 1.823040E-05 0.000000E+00
|
||||
385 1.358420E-05 0.000000E+00
|
||||
386 9.924440E-06 0.000000E+00
|
||||
387 7.097790E-06 0.000000E+00
|
||||
388 4.959500E-06 0.000000E+00
|
||||
389 3.377490E-06 0.000000E+00
|
||||
390 2.234920E-06 0.000000E+00
|
||||
391 1.431250E-06 0.000000E+00
|
||||
392 8.824160E-07 0.000000E+00
|
||||
393 5.199920E-07 0.000000E+00
|
||||
394 2.898420E-07 0.000000E+00
|
||||
395 1.503850E-07 0.000000E+00
|
||||
396 7.068940E-08 0.000000E+00
|
||||
397 2.857200E-08 0.000000E+00
|
||||
398 9.089140E-09 0.000000E+00
|
||||
399 -1.280410E-09 0.000000E+00
|
||||
96
lib/python/nettab/test/filters/scp_deci2.1
Normal file
@ -0,0 +1,96 @@
0 -4.624365e-06 0.000000e+00
1 -8.258298e-05 0.000000e+00
2 -2.260141e-04 0.000000e+00
3 -2.539009e-04 0.000000e+00
4 7.665667e-07 0.000000e+00
5 3.050186e-04 0.000000e+00
6 1.712792e-04 0.000000e+00
7 -3.494469e-04 0.000000e+00
8 -4.491013e-04 0.000000e+00
9 2.631577e-04 0.000000e+00
10 7.897725e-04 0.000000e+00
11 3.857301e-05 0.000000e+00
12 -1.091783e-03 0.000000e+00
13 -5.999956e-04 0.000000e+00
14 1.206435e-03 0.000000e+00
15 1.397154e-03 0.000000e+00
16 -9.624677e-04 0.000000e+00
17 -2.313273e-03 0.000000e+00
18 2.078273e-04 0.000000e+00
19 3.130074e-03 0.000000e+00
20 1.137016e-03 0.000000e+00
21 -3.543348e-03 0.000000e+00
22 -3.024242e-03 0.000000e+00
23 3.207636e-03 0.000000e+00
24 5.238007e-03 0.000000e+00
25 -1.803839e-03 0.000000e+00
26 -7.375909e-03 0.000000e+00
27 -8.729728e-04 0.000000e+00
28 8.870910e-03 0.000000e+00
29 4.831847e-03 0.000000e+00
30 -9.042305e-03 0.000000e+00
31 -9.813905e-03 0.000000e+00
32 7.179136e-03 0.000000e+00
33 1.525300e-02 0.000000e+00
34 -2.628732e-03 0.000000e+00
35 -2.026759e-02 0.000000e+00
36 -5.142914e-03 0.000000e+00
37 2.366362e-02 0.000000e+00
38 1.657857e-02 0.000000e+00
39 -2.387548e-02 0.000000e+00
40 -3.227953e-02 0.000000e+00
41 1.860678e-02 0.000000e+00
42 5.394208e-02 0.000000e+00
43 -3.140518e-03 0.000000e+00
44 -8.849621e-02 0.000000e+00
45 -4.014856e-02 0.000000e+00
46 1.847636e-01 0.000000e+00
47 4.066011e-01 0.000000e+00
48 4.066011e-01 0.000000e+00
49 1.847636e-01 0.000000e+00
50 -4.014856e-02 0.000000e+00
51 -8.849621e-02 0.000000e+00
52 -3.140518e-03 0.000000e+00
53 5.394208e-02 0.000000e+00
54 1.860678e-02 0.000000e+00
55 -3.227953e-02 0.000000e+00
56 -2.387548e-02 0.000000e+00
57 1.657857e-02 0.000000e+00
58 2.366362e-02 0.000000e+00
59 -5.142914e-03 0.000000e+00
60 -2.026759e-02 0.000000e+00
61 -2.628732e-03 0.000000e+00
62 1.525300e-02 0.000000e+00
63 7.179136e-03 0.000000e+00
64 -9.813905e-03 0.000000e+00
65 -9.042305e-03 0.000000e+00
66 4.831847e-03 0.000000e+00
67 8.870910e-03 0.000000e+00
68 -8.729728e-04 0.000000e+00
69 -7.375909e-03 0.000000e+00
70 -1.803839e-03 0.000000e+00
71 5.238007e-03 0.000000e+00
72 3.207636e-03 0.000000e+00
73 -3.024242e-03 0.000000e+00
74 -3.543348e-03 0.000000e+00
75 1.137016e-03 0.000000e+00
76 3.130074e-03 0.000000e+00
77 2.078273e-04 0.000000e+00
78 -2.313273e-03 0.000000e+00
79 -9.624677e-04 0.000000e+00
80 1.397154e-03 0.000000e+00
81 1.206435e-03 0.000000e+00
82 -5.999956e-04 0.000000e+00
83 -1.091783e-03 0.000000e+00
84 3.857301e-05 0.000000e+00
85 7.897725e-04 0.000000e+00
86 2.631577e-04 0.000000e+00
87 -4.491013e-04 0.000000e+00
88 -3.494469e-04 0.000000e+00
89 1.712792e-04 0.000000e+00
90 3.050186e-04 0.000000e+00
91 7.665667e-07 0.000000e+00
92 -2.539009e-04 0.000000e+00
93 -2.260141e-04 0.000000e+00
94 -8.258298e-05 0.000000e+00
95 -4.624365e-06 0.000000e+00
160
lib/python/nettab/test/filters/scp_deci5.1
Normal file
@ -0,0 +1,160 @@
0 4.032461e-05 0.000000e+00
1 7.453280e-05 0.000000e+00
2 1.234553e-04 0.000000e+00
3 1.701887e-04 0.000000e+00
4 1.973105e-04 0.000000e+00
5 1.854891e-04 0.000000e+00
6 1.193456e-04 0.000000e+00
7 -5.723101e-06 0.000000e+00
8 -1.779232e-04 0.000000e+00
9 -3.673259e-04 0.000000e+00
10 -5.295104e-04 0.000000e+00
11 -6.150085e-04 0.000000e+00
12 -5.832354e-04 0.000000e+00
13 -4.172837e-04 0.000000e+00
14 -1.349516e-04 0.000000e+00
15 2.083330e-04 0.000000e+00
16 5.277090e-04 0.000000e+00
17 7.281899e-04 0.000000e+00
18 7.312587e-04 0.000000e+00
19 5.019202e-04 0.000000e+00
20 6.783176e-05 0.000000e+00
21 -4.771493e-04 0.000000e+00
22 -9.891580e-04 0.000000e+00
23 -1.308918e-03 0.000000e+00
24 -1.307358e-03 0.000000e+00
25 -9.300168e-04 0.000000e+00
26 -2.262541e-04 0.000000e+00
27 6.483476e-04 0.000000e+00
28 1.461708e-03 0.000000e+00
29 1.963222e-03 0.000000e+00
30 1.956625e-03 0.000000e+00
31 1.367725e-03 0.000000e+00
32 2.854628e-04 0.000000e+00
33 -1.040387e-03 0.000000e+00
34 -2.250679e-03 0.000000e+00
35 -2.969069e-03 0.000000e+00
36 -2.912737e-03 0.000000e+00
37 -1.990583e-03 0.000000e+00
38 -3.573537e-04 0.000000e+00
39 1.598840e-03 0.000000e+00
40 3.340972e-03 0.000000e+00
41 4.323764e-03 0.000000e+00
42 4.155636e-03 0.000000e+00
43 2.736002e-03 0.000000e+00
44 3.234310e-04 0.000000e+00
45 -2.494752e-03 0.000000e+00
46 -4.934943e-03 0.000000e+00
47 -6.225197e-03 0.000000e+00
48 -5.836136e-03 0.000000e+00
49 -3.668966e-03 0.000000e+00
50 -1.394092e-04 0.000000e+00
51 3.880228e-03 0.000000e+00
52 7.261232e-03 0.000000e+00
53 8.919356e-03 0.000000e+00
54 8.140252e-03 0.000000e+00
55 4.837050e-03 0.000000e+00
56 -3.434785e-04 0.000000e+00
57 -6.115665e-03 0.000000e+00
58 -1.084778e-02 0.000000e+00
59 -1.299272e-02 0.000000e+00
60 -1.154995e-02 0.000000e+00
61 -6.430376e-03 0.000000e+00
62 1.391199e-03 0.000000e+00
63 1.000571e-02 0.000000e+00
64 1.698057e-02 0.000000e+00
65 1.997340e-02 0.000000e+00
66 1.740665e-02 0.000000e+00
67 9.029463e-03 0.000000e+00
68 -3.794969e-03 0.000000e+00
69 -1.818304e-02 0.000000e+00
70 -3.022295e-02 0.000000e+00
71 -3.578333e-02 0.000000e+00
72 -3.146898e-02 0.000000e+00
73 -1.550444e-02 0.000000e+00
74 1.167237e-02 0.000000e+00
75 4.726833e-02 0.000000e+00
76 8.650819e-02 0.000000e+00
77 1.234668e-01 0.000000e+00
78 1.521942e-01 0.000000e+00
79 1.678939e-01 0.000000e+00
80 1.678939e-01 0.000000e+00
81 1.521942e-01 0.000000e+00
82 1.234668e-01 0.000000e+00
83 8.650819e-02 0.000000e+00
84 4.726833e-02 0.000000e+00
85 1.167237e-02 0.000000e+00
86 -1.550444e-02 0.000000e+00
87 -3.146898e-02 0.000000e+00
88 -3.578333e-02 0.000000e+00
89 -3.022295e-02 0.000000e+00
90 -1.818304e-02 0.000000e+00
91 -3.794969e-03 0.000000e+00
92 9.029463e-03 0.000000e+00
93 1.740665e-02 0.000000e+00
94 1.997340e-02 0.000000e+00
95 1.698057e-02 0.000000e+00
96 1.000571e-02 0.000000e+00
97 1.391199e-03 0.000000e+00
98 -6.430376e-03 0.000000e+00
99 -1.154995e-02 0.000000e+00
100 -1.299272e-02 0.000000e+00
101 -1.084778e-02 0.000000e+00
102 -6.115665e-03 0.000000e+00
103 -3.434785e-04 0.000000e+00
104 4.837050e-03 0.000000e+00
105 8.140252e-03 0.000000e+00
106 8.919356e-03 0.000000e+00
107 7.261232e-03 0.000000e+00
108 3.880228e-03 0.000000e+00
109 -1.394092e-04 0.000000e+00
110 -3.668966e-03 0.000000e+00
111 -5.836136e-03 0.000000e+00
112 -6.225197e-03 0.000000e+00
113 -4.934943e-03 0.000000e+00
114 -2.494752e-03 0.000000e+00
115 3.234310e-04 0.000000e+00
116 2.736002e-03 0.000000e+00
117 4.155636e-03 0.000000e+00
118 4.323764e-03 0.000000e+00
119 3.340972e-03 0.000000e+00
120 1.598840e-03 0.000000e+00
121 -3.573537e-04 0.000000e+00
122 -1.990583e-03 0.000000e+00
123 -2.912737e-03 0.000000e+00
124 -2.969069e-03 0.000000e+00
125 -2.250679e-03 0.000000e+00
126 -1.040387e-03 0.000000e+00
127 2.854628e-04 0.000000e+00
128 1.367725e-03 0.000000e+00
129 1.956625e-03 0.000000e+00
130 1.963222e-03 0.000000e+00
131 1.461708e-03 0.000000e+00
132 6.483476e-04 0.000000e+00
133 -2.262541e-04 0.000000e+00
134 -9.300168e-04 0.000000e+00
135 -1.307358e-03 0.000000e+00
136 -1.308918e-03 0.000000e+00
137 -9.891580e-04 0.000000e+00
138 -4.771493e-04 0.000000e+00
139 6.783176e-05 0.000000e+00
140 5.019202e-04 0.000000e+00
141 7.312587e-04 0.000000e+00
142 7.281899e-04 0.000000e+00
143 5.277090e-04 0.000000e+00
144 2.083330e-04 0.000000e+00
145 -1.349516e-04 0.000000e+00
146 -4.172837e-04 0.000000e+00
147 -5.832354e-04 0.000000e+00
148 -6.150085e-04 0.000000e+00
149 -5.295104e-04 0.000000e+00
150 -3.673259e-04 0.000000e+00
151 -1.779232e-04 0.000000e+00
152 -5.723101e-06 0.000000e+00
153 1.193456e-04 0.000000e+00
154 1.854891e-04 0.000000e+00
155 1.973105e-04 0.000000e+00
156 1.701887e-04 0.000000e+00
157 1.234553e-04 0.000000e+00
158 7.453280e-05 0.000000e+00
159 4.032461e-05 0.000000e+00
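The decimation stages above are symmetric (linear-phase) FIR filters: each coefficient list mirrors around its midpoint, which is apparently why the Ff: entries in small-inst.db below declare them with symmetry code C and only half the tap count. A minimal sanity-check sketch, assuming the three-column layout shown above (tap index, real part, imaginary part); the file name is just an example:

def read_fir(path):
    """Read a FIR coefficient file: one 'index real imag' triple per line."""
    coeffs = []
    with open(path) as f:
        for line in f:
            fields = line.split()
            if len(fields) == 3:
                coeffs.append(float(fields[1]))  # imaginary part is 0 throughout
    return coeffs

coeffs = read_fir('scp_deci5.1')
n = len(coeffs)
# Linear phase: the impulse response must be symmetric about its midpoint.
assert all(abs(coeffs[i] - coeffs[n - 1 - i]) < 1e-12 for i in range(n // 2))
print('%d taps, DC gain %.6f' % (n, sum(coeffs)))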
73
lib/python/nettab/test/small-inst.db
Normal file
@ -0,0 +1,73 @@
# Begin data logger list
# Gain max.spfr mcld IIR(A,I)/FIR filter stages (not mandatory)
Ia: DigitizerModel="M24" M24-SC M24/BW
Ia: DigitizerModel="Q330" Q330/N Q330/HR Q330-SC

Ia: RecorderModel="M24" M24-SC M24/BW
Ia: RecorderModel="SeisComP" Q330-SC
Ia: RecorderModel="Q330" Q330/N Q330/HR

Ia: RecorderManufacturer="Quanterra" Q330/N Q330/HR
Ia: RecorderManufacturer="Lennartz" M24-SC M24/BW
Ia: RecorderManufacturer="Alpha2000" Q330-SC

Ia: DigitizerManufacturer="Quanterra" Q330/N Q330/HR Q330-SC
Ia: DigitizerManufacturer="Lennartz" M24-SC M24/BW

# Gain max.spfr mcld IIR(A,I)/FIR filter stages (not mandatory)
Dl: Q330/N 419430.0 100.0 0.0 Q330 200,100_1,50_2,40_3,20_4,1_5,0.1_5/10
Dl: Q330/HR 1677720.0 100.0 0.0 Q330 100_1,50_2,40_3,20_4,1_5,0.1_5/10
Dl: Q330-SC 419430.0 100.0 0.0 Q330 100_1,50_1/6,20_1/7,1_1/7/8/9,0.1_1/7/8/9/10

#
# End data logger list


# FIR filter list for Quanterra Q330 digitizer and Seiscomp recorder
# Name Sym ncf inrate fac delay corrtn gain frg
Ff: Q330_FIR_1 q330_b100_100 A 65 0 100.0 1 0.041607 0.041607 1.0 0.0
Ff: Q330_FIR_2 q330_b100_50 A 81 0 50.0 1 0.531607 0.531607 1.0 0.0
Ff: Q330_FIR_3 q330_b100_40 A 39 0 40.0 1 0.430462 0.430462 1.0 0.0
Ff: Q330_FIR_4 q330_b100_20 A 67 0 20.0 1 1.630462 1.630462 1.0 0.0
Ff: Q330_FIR_5 q330_b100_1 A 31 0 1.0 1 15.930462 15.930462 1.0 0.0
Ff: Q330_FIR_6 scp_deci2.1 C 48 0 100.0 2 0.000 0.0 1.0 0.0
Ff: Q330_FIR_7 scp_deci5.1 C 80 0 100.0 5 0.000 0.0 1.0 0.0
Ff: Q330_FIR_8 scp_deci2.1 C 48 0 20.0 2 0.000 0.0 1.0 0.0
Ff: Q330_FIR_9 scp_deci10.1 C 200 0 10.0 10 0.000 0.0 1.0 0.0
Ff: Q330_FIR_10 scp_deci10.1 C 200 0 1.0 10 0.000 0.0 4.0 0.0



# Digitizer IIR filter response list
#

# Digitizer analog response list
#


# Begin seismometer list
# Seismometer analog response list
# . Gain frgn Norm.fac fnr nz np Zeros&Poles
# Sensor type: VBB
Ia: Model="STS-2/CZ" STS-2/CZ
Ia: Model="STS-2/N" STS-2/N
Ia: Model="STS-2/G2" STS-2/G2
Ia: Model="STS-2/HG" STS-2/HG
Ia: Model="STS-2/G1" STS-2/G1
Ia: Model="STS-2/G3" STS-2/G3

Ia: Type="VBB" STS-2/CZ STS-2/N STS-2/G2 STS-2/HG STS-2/G3 STS-2/G1

Ia: Unit="M/S" STS-2/CZ STS-2/N STS-2/G2 STS-2/HG STS-2/G3 STS-2/G1

Ia: Manufacturer="Streckeisen" STS-2/CZ STS-2/N STS-2/G2 STS-2/HG STS-2/G3 STS-2/G1

Se: STS-2/N 1500.0 0.02 6.0077e7 1.0 2 5 2(0.0,0.0) (-0.037004,0.037016) (-0.037004,-0.037016) (-251.33,0.0) (-131.04,-467.29) (-131.04,467.29)
Se: STS-2/G1 1500.0 0.02 3.46844e17 1.0 5 9 2(0.0,0.0) (-15.15,0.0) (-318.6,401.2) (-318.6,-401.2) (-0.037,0.037) (-0.037,-0.037) (-15.99,0.0) (-100.9,401.9) (-100.9,-401.9) (-187.2,0.0) (-417.1,0.0) (-7454.0,7142.0) (-7454.0,-7142.0)
Se: STS-2/G2 1500.0 0.02 3.46844e17 1.0 9 14 2(0.0,0.0) (-10.75,0.0) (-294.6,0.0) (-555.1,0.0) (-683.9,175.5) (-683.9,-175.5) (-5907.0,3411.0) (-5907.0,-3411.0) (-0.037,0.037) (-0.037,-0.037) (-10.95,0.0) (-98.44,442.8) (-98.44,-442.8) (-251.1,0.0) (-556.8,60.0) (-556.8,-60.0) (-1391.0,0.0) (-4936.0,4713.0) (-4936.0,-4713.0) (-6227.0,0.0) (-6909.0,9208.0) (-6909.0,-9208.0)
Se: STS-2/G3 1500.0 0.02 3.46844e17 1.0 6 11 2(0.0,0.0) (-15.15,0.0) (-176.6,0.0) (-463.1,430.5) (-463.1,-430.5) (-0.037,0.037) (-0.037,-0.037) (-15.64,0.0) (-97.34,-400.7) (-97.34,400.7) (-255.1,0.0) (-374.8,0.0) (-520.3,0.0) (-10530.,10050.) (-10530.,-10050.) (-13300.,0.0)
#Streckeisen_STS-2/HG> 20000.0 0.02 3.46844e17 1.0 6 11 2(0.0,0.0) (-15.15,0.0) (-176.6,0.0) (-463.1,430.5) (-463.1,430.5) (-0.037,0.037) (-0.037,-0.037) (-15.64,0.0) (-97.34,-400.7) (-97.34,400.7) (-255.1,0.0) (-374.8,0.0) (-520.3,0.0) (-10530.,10050.) (-10530.,-10050.) (-13300.,0.0)
#Streckeisen_STS-2/CZ> 1500.0 1.0 4.47172e2 1.0 6 7 2(0.0,0.0) (-15.1488,0.0) (-199.554,0.0) (-461.814,429.079) (-461.814,-429.079) (-0.03702,0.03702) (-0.03702,-0.03702) (-15.2744,0.0) (-82.8124,409.852) (-82.8124,-409.852) (-443.314,0.0) (-454.526,0.0)


# End seismometer list
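The Ff: lines above tie the coefficient files to datalogger stages. A small parsing sketch, reading fields per the '# Name Sym ncf inrate fac delay corrtn gain frg' header comment; the exact nettab semantics, including the unlabelled sixth column, are an inference here, not a definitive parser:

def parse_ff(line):
    """Split an 'Ff:' filter line into its documented fields."""
    f = line.split()
    return {
        'name': f[1],
        'file': f[2],           # coefficient file, e.g. scp_deci5.1
        'sym': f[3],            # A/C symmetry code (assumed meaning)
        'ncf': int(f[4]),       # number of stored coefficients
        'inrate': float(f[6]),  # input sample rate in Hz (f[5] is unlabelled above)
        'fac': int(f[7]),       # decimation factor
    }

ff = parse_ff('Ff: Q330_FIR_7 scp_deci5.1 C 80 0 100.0 5 0.000 0.0 1.0 0.0')
print('%s: %g Hz -> %g Hz' % (ff['name'], ff['inrate'], ff['inrate'] / ff['fac']))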
291
lib/python/nettab/test/testTab.py
Normal file
@ -0,0 +1,291 @@
#!/usr/bin/env python

###############################################################################
# Copyright (C) 2020 Helmholtz-Zentrum Potsdam - Deutsches
# GeoForschungsZentrum GFZ
#
# License: GPL Affero General Public License (GNU AGPL) version 3.0
# Author: Peter L. Evans
# E-mail: <pevans@gfz-potsdam.de>
#
###############################################################################

from __future__ import print_function

from nettab.tab import Tab
import json
import os
import sys
import tempfile
import unittest

# Just to dump XML output??:
try:
    import seiscomp.io as IO
except ImportError:
    print('Failed to import seiscomp.io module, trying seiscomp3.IO instead')
    from seiscomp3 import IO


# Just to examine the output XML:
import xml.etree.ElementTree as ET


def xmlparse(filename):
    parser = ET.XMLParser()
    try:
        parser.feed(open(filename).read())
    except Exception:
        raise
    elem = parser.close()
    ns = '{http://geofon.gfz-potsdam.de/ns/seiscomp3-schema/0.11}'
    return (elem, ns)


class TestTab(unittest.TestCase):
    simpleTab = '''
Nw: QQ 2020-04-01
Na: Description="Atlantis Seismic Network"
Sl: AA01 "Pillars of Hercules/Atlantis" Q330/N%xxxx STS-2/N%yyyy 100/20 ZNE 30.0 -15.0 -900 2.0 2020-04-02
'''

    tabWithPid = '''
Nw: QQ 2020-04-01
Na: Description="Atlantis Seismic Network"
Na: Pid="doi:10.1234/xyz"
Sl: AA01 "Pillars of Hercules/Atlantis" Q330/N%xxxx STS-2/N%yyyy 100/20 ZNE 30.0 -15.0 -900 2.0 2020-04-02
'''

    instFile = 'small-inst.db'

    templateTab = '''
Nw: {nwline}
Na: {naline}
Sl: {slline}
'''

    def _writeTempTab(self, tabText):
        '''Put a nettab formatted string into a temporary file,
        returning the file name.
        '''
        # Text mode so that print() can write str under Python 3.
        with tempfile.NamedTemporaryFile('w', delete=False) as tab:
            print(tabText, file=tab)
            tab.close()
        return tab.name

    def _writeInvXML(self, inv, filename='something.xml'):
        '''Copied from tab2inv.py'''
        ar = IO.XMLArchive()
        print("Generating file: %s" % filename,
              file=sys.stderr)
        ar.create(filename)
        ar.setFormattedOutput(True)
        ar.setCompression(False)
        ar.writeObject(inv)
        ar.close()

    def _writeNewInvXML(self, sc3inv, filename):
        try:
            os.unlink(filename)
        except OSError:  # Python3: Catch FileNotFoundError instead.
            pass
        self._writeInvXML(sc3inv, filename)

    def test_1(self):
        '''Create object'''
        t = Tab()
        print('Expect: "Warning, not filter folder supplied."',
              file=sys.stderr)

    def test_2_filter(self):
        '''Provide a (trivial, non-useful) filter folder'''
        t = Tab(None, None, '.', None, None)

    def test_2_defaults_warning(self):
        '''Provide and load a defaults file'''
        defaults = tempfile.NamedTemporaryFile('w', delete=False)
        print('''
Nw: QQ 2001/001
''', file=defaults)
        defaultsFile = defaults.name
        defaults.close()
        t = Tab(None, defaultsFile, '.', None, None)
        os.unlink(defaultsFile)
        print("Expect: 'Warning: Defaults file can only contain attributes'",
              file=sys.stderr)

    def test_2_defaults_attributes(self):
        '''Provide and load a defaults file'''
        defaults = tempfile.NamedTemporaryFile('w', delete=False)
        print('''
Na: Foo=bar
Sa: StationFoo=bla * *
Ia: InstrumentFoo=blu *
''', file=defaults)
        defaultsFile = defaults.name
        defaults.close()
        t = Tab(None, defaultsFile, '.', None, None)
        os.unlink(defaultsFile)

    def test_3_digest(self):
        tabFile = self._writeTempTab(self.simpleTab)

        t = Tab(None, None, '.', None, None)
        t.digest(tabFile)
        os.unlink(tabFile)

    def SKIPtest_3_digest_check(self):
        tabFile = self._writeTempTab(self.simpleTab)

        t = Tab(None, None, 'filters', None, None)
        t.digest(tabFile)
        t.digest(self.instFile)
        t.check()
        os.unlink(tabFile)

    def test_4_digest_twice(self):
        '''Exception is raised by digesting twice.'''
        tabFile = self._writeTempTab(self.simpleTab)

        t = Tab(None, None, '.', None, None)
        t.digest(tabFile)
        with self.assertRaises(Exception):
            t.digest(tabFile)
            # print('Expect: "Warning: File {name} is already digested."')

        os.unlink(tabFile)

    def test_5_na_after_sa(self):
        '''Not allowed to provide Na lines after a Sl line'''
        s = '\n'.join([self.simpleTab, 'Na: Pid=10.123/xyz'])
        tabFile = self._writeTempTab(s)

        t = Tab(None, None, '.', None, None)  # was missing, leaving 't' undefined
        with self.assertRaises(Exception):
            t.digest(tabFile)
            # print('Expect "No Na lines after a Sl line.',
            #       'Network has already been defined."')
        os.unlink(tabFile)

    def test_6_network_pid(self):
        '''Key 'Pid' is an allowed network attribute'''
        tabString = '''
Nw: QQ 2001/001
Na: Region=Atlantis
Na: Pid=10.123/xyz
'''
        tabFile = self._writeTempTab(tabString)

        t = Tab(None, None, '.', None, None)
        t.digest(tabFile)
        os.unlink(tabFile)

    def test_6_network_pid_check(self):
        '''No problem to define extra unhandled attributes'''
        tabString = '''
Nw: QQ 2001/001
Na: Region=Atlantis
Na: Pid=10.123/xyz
Na: Foo=bar
'''
        tabFile = self._writeTempTab(tabString)

        t = Tab(None, None, '.', None, None)
        t.digest(tabFile)
        t.check()
        os.unlink(tabFile)

    def test_7_sc3Obj(self):
        '''Call sc3Obj with a trivial t'''
        t = Tab(None, None, '.', None, None)
        sc3inv = t.sc3Obj()

    def test_8_network_sc3Obj(self):
        '''Call sc3Obj with an actual network, write XML'''
        tabFile = self._writeTempTab(self.simpleTab)

        t = Tab(None, None, 'filters', None, None)
        t.digest(tabFile)
        t.digest(self.instFile)
        sc3inv = t.sc3Obj()
        # Returns ok, but reports inst.db errors and warnings to stdout.
        self.assertTrue(sc3inv)
        outFile = '/tmp/testTabInv.xml'

        try:
            os.unlink(outFile)
        except OSError:  # Python3: Catch FileNotFoundError instead.
            pass

        self._writeInvXML(sc3inv, filename=outFile)
        self.assertTrue(os.path.exists(outFile))
        # Further checks: that the file contains a network, etc.

    def test_9_network_pid_sc3Obj(self):
        '''Load a network with PID, write XML, confirm PID is there.
        Older nettabs reported 'ignoring attribute Pid'.
        '''
        tabFile = self._writeTempTab(self.tabWithPid)

        t = Tab(None, None, 'filters', None, None)
        t.digest(tabFile)
        t.digest(self.instFile)
        sc3inv = t.sc3Obj()
        self.assertTrue(sc3inv)

        outFile = '/tmp/testTabInvPid.xml'
        self._writeNewInvXML(sc3inv, outFile)
        self.assertTrue(os.path.exists(outFile))

        # Check that the file contains exactly one network comment
        # which is a JSON string with PID.
        # e.g. '{"type": "DOI", "value": "10.1234/xsdfa"}'
        (elem, ns) = xmlparse(outFile)
        for e in elem:
            for f in e:
                if f.tag == ns + 'network':
                    g = f.findall(ns + 'comment')
                    self.assertTrue(len(g) == 1)
                    t = g[0].findall(ns + 'text')
                    text = t[0].text
                    j = json.loads(t[0].text)
                    self.assertEqual(j['type'], 'DOI')
                    self.assertEqual(j['value'], '10.1234/xyz')
                    ### self.assertEqual(t[0].text, 'doi:10.1234/xyz')

    def test_10_network_comment(self):
        tabString = '''
Nw: NN 2020/092
Na: Region=Atlantis
Na: Comment="This is commentary"
Na: Remark="Remarkable!"
Sl: AA01 "Zeus" Q330/N%xxxx STS-2/N%yyyy 20 Z 30 -15 -2 2.0 2020/093
'''
        tabFile = self._writeTempTab(tabString)
        t = Tab(None, None, 'filters', None, None)
        t.digest(tabFile)
        t.digest(self.instFile)
        t.check()
        os.unlink(tabFile)

        sc3inv = t.sc3Obj()
        self.assertTrue(sc3inv)
        outFile = '/tmp/testTabInvComment.xml'
        self._writeNewInvXML(sc3inv, outFile)
        self.assertTrue(os.path.exists(outFile))

        # Further checks: that the file contains a network with PID. TODO
        (elem, ns) = xmlparse(outFile)
        for e in elem:
            for f in e:
                if f.tag == ns + 'network':
                    g = f.findall(ns + 'comment')
                    self.assertTrue(len(g) == 1)
                    # DEBUG print('DEBUG Network comment found:',
                    #             g[0].findall(ns + 'text')[0].text)


if __name__ == '__main__':
    unittest.main(verbosity=1)
4
lib/python/seiscomp/__init__.py
Normal file
@ -0,0 +1,4 @@
import os
import sys

sys.setdlopenflags(os.RTLD_LAZY | os.RTLD_GLOBAL)
BIN
lib/python/seiscomp/_client.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_config.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_core.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_geo.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_io.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_logging.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_math.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_seismology.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_system.so
Normal file
Binary file not shown.
BIN
lib/python/seiscomp/_utils.so
Normal file
Binary file not shown.
560
lib/python/seiscomp/bindings2cfg.py
Normal file
@ -0,0 +1,560 @@
############################################################################
# Copyright (C) gempa GmbH #
# All rights reserved. #
# Contact: gempa GmbH (seiscomp-dev@gempa.de) #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
# #
# Other Usage #
# Alternatively, this file may be used in accordance with the terms and #
# conditions contained in a signed written agreement between you and #
# gempa GmbH. #
############################################################################

import os, time, sys
import seiscomp.core, seiscomp.client, seiscomp.datamodel
import seiscomp.io, seiscomp.system


def collectParams(container):
    params = {}
    for i in range(container.groupCount()):
        params.update(collectParams(container.group(i)))
    for i in range(container.structureCount()):
        params.update(collectParams(container.structure(i)))
    for i in range(container.parameterCount()):
        p = container.parameter(i)
        if p.symbol.stage == seiscomp.system.Environment.CS_UNDEFINED:
            continue
        params[p.variableName] = ",".join(p.symbol.values)

    return params


def collect(idset, paramSetID):
    paramSet = seiscomp.datamodel.ParameterSet.Find(paramSetID)
    if not paramSet:
        return
    idset[paramSet.publicID()] = 1
    if not paramSet.baseID():
        return
    collect(idset, paramSet.baseID())


def sync(paramSet, params):
    obsoleteParams = []
    seenParams = {}
    i = 0
    while i < paramSet.parameterCount():
        p = paramSet.parameter(i)
        if p.name() in params:
            if p.name() in seenParams:
                # Multiple parameter definitions with same name
                sys.stderr.write(
                    f"- {p.publicID()}:{p.name()} / duplicate parameter name\n"
                )
                p.detach()
                continue
            seenParams[p.name()] = 1
            val = params[p.name()]
            if val != p.value():
                p.setValue(val)
                p.update()
        else:
            obsoleteParams.append(p)
        i = i + 1

    for p in obsoleteParams:
        p.detach()

    for key, val in list(params.items()):
        if key in seenParams:
            continue
        p = seiscomp.datamodel.Parameter.Create()
        p.setName(key)
        p.setValue(val)
        paramSet.add(p)


class ConfigDBUpdater(seiscomp.client.Application):
    def __init__(self, argc, argv):
        seiscomp.client.Application.__init__(self, argc, argv)
        self.setLoggingToStdErr(True)
        self.setMessagingEnabled(True)
        self.setDatabaseEnabled(True, True)
        self.setAutoApplyNotifierEnabled(False)
        self.setInterpretNotifierEnabled(False)
        self.setMessagingUsername("_sccfgupd_")
        self.setLoadConfigModuleEnabled(True)
        # Load all configuration modules
        self.setConfigModuleName("")
        self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)

        self._moduleName = None
        self._outputFile = None
        self._createNotifier = False
        self._keyDir = None

    def createCommandLineDescription(self):
        self.commandline().addGroup("Input")
        self.commandline().addStringOption(
            "Input",
            "key-dir",
            "Overrides the location of the default key directory ($SEISCOMP_ROOT/etc/key)",
        )
        self.commandline().addGroup("Output")
        self.commandline().addStringOption(
            "Output",
            "module-name",
            "The module name to be used for the config module. If not given, "
            "the application name is used, or 'trunk' if output to a file is "
            "enabled",
        )
        self.commandline().addStringOption(
            "Output", "output,o", "If given, an output XML file is generated"
        )
        self.commandline().addOption(
            "Output",
            "create-notifier",
            "If given, a notifier message containing all notifiers will be "
            "written to the output XML. This option only applies if an output "
            "file is given. Notifier creation requires either an input "
            "database or an input config XML as reference.",
        )

    def validateParameters(self):
        if not seiscomp.client.Application.validateParameters(self):
            return False

        try:
            self._moduleName = self.commandline().optionString("module-name")
        except:
            pass

        try:
            self._outputFile = self.commandline().optionString("output")
            self._createNotifier = self.commandline().hasOption("create-notifier")
            # Switch to offline mode
            self.setMessagingEnabled(False)
            self.setDatabaseEnabled(False, False)
            if self._createNotifier:
                if self.isConfigDatabaseEnabled():
                    self.setDatabaseEnabled(True, False)
                else:
                    self.setLoadConfigModuleEnabled(False)
        except:
            pass

        try:
            self._keyDir = self.commandline().optionString("key-dir")
        except:
            pass

        return True

    def init(self):
        if not seiscomp.client.Application.init(self):
            return False

        # Initialize the basic directories
        filebase = seiscomp.system.Environment.Instance().installDir()
        descdir = os.path.join(filebase, "etc", "descriptions")

        # Load definitions of the configuration schema
        defs = seiscomp.system.SchemaDefinitions()
        if not defs.load(descdir):
            print("Error: could not read descriptions", file=sys.stderr)
            return False

        if defs.moduleCount() == 0:
            print("Warning: no modules defined, nothing to do", file=sys.stderr)
            return False

        # Create a model from the schema and read its configuration including
        # all bindings.
        model = seiscomp.system.Model()
        if self._keyDir:
            model.keyDirOverride = self._keyDir
        model.create(defs)
        model.readConfig()

        # Find all binding mods for trunk. Bindings of modules where standalone
        # is set to true are ignored. They are supposed to handle their bindings
        # on their own.
        self.bindingMods = []
        for i in range(defs.moduleCount()):
            mod = defs.module(i)
            # Ignore stand alone modules (eg seedlink, slarchive, ...) as they
            # are not using the trunk libraries and don't need database
            # configurations
            if mod.isStandalone():
                continue

            self.bindingMods.append(mod.name)

        if len(self.bindingMods) == 0:
            print("Warning: no usable modules found, nothing to do", file=sys.stderr)
            return False

        self.stationSetups = {}

        # Read bindings
        for m in self.bindingMods:
            mod = model.module(m)
            if not mod:
                print(f"Warning: module {m} not assigned", file=sys.stderr)
                continue
            if len(mod.bindings) == 0:
                continue

            if len(m) > 20:
                print(
                    f"Error: rejecting module {m} - name is longer than 20 characters",
                    file=sys.stderr,
                )
                return False

            # Rename global to default for being compatible with older
            # releases
            if m == "global":
                m = "default"

            print(f"+ {m}", file=sys.stderr)

            for staid in list(mod.bindings.keys()):
                binding = mod.getBinding(staid)
                if not binding:
                    continue
                # sys.stderr.write(" + %s.%s\n" % (staid.networkCode, staid.stationCode))
                params = {}
                for i in range(binding.sectionCount()):
                    params.update(collectParams(binding.section(i)))
                key = (staid.networkCode, staid.stationCode)
                if key not in self.stationSetups:
                    self.stationSetups[key] = {}
                self.stationSetups[key][m] = params
            print(
                f" + read {len(list(mod.bindings.keys()))} stations", file=sys.stderr
            )

        return True

    def printUsage(self):
        print(
            """Usage:
  bindings2cfg [options]

Synchronize bindings from key files with processing system or output as
configuration XML file"""
        )

        seiscomp.client.Application.printUsage(self)

        print(
            """Examples:
Write bindings configuration from key directory to a configuration XML file:
  bindings2cfg --key-dir ./etc/key -o config.xml

Synchronize bindings configuration from key directory to a processing system
  bindings2cfg --key-dir ./etc/key -H proc
"""
        )

        return True

    def send(self, *args):
        """
        A simple wrapper that sends a message and tries to resend it in case of
        an error.
        """
        while not self.connection().send(*args):
            print("Warning: sending failed, retrying", file=sys.stderr)
            time.sleep(1)

    def run(self):
        """
        Reimplements the main loop of the application. This method collects
        all bindings and updates the database. It searches for already existing
        objects and updates them or creates new objects. Objects that weren't
        touched are removed. This tool is the only one that should write the
        configuration into the database, and thus it manages the content.
        """
        config = seiscomp.client.ConfigDB.Instance().config()
        if config is None:
            config = seiscomp.datamodel.Config()

        configMod = None
        obsoleteConfigMods = []
        moduleName = self._moduleName

        if self._outputFile is None or self._createNotifier:
            if not moduleName:
                moduleName = self.name()
            seiscomp.datamodel.Notifier.Enable()
        else:
            if not moduleName:
                moduleName = "trunk"

        configID = f"Config/{moduleName}"

        for i in range(config.configModuleCount()):
            if config.configModule(i).publicID() != configID:
                obsoleteConfigMods.append(config.configModule(i))
            else:
                configMod = config.configModule(i)

        # Remove obsolete config modules
        for cm in obsoleteConfigMods:
            print(f"- {cm.name()} / obsolete module configuration", file=sys.stderr)
            ps = seiscomp.datamodel.ParameterSet.Find(cm.parameterSetID())
            if ps is not None:
                ps.detach()
            cm.detach()
        del obsoleteConfigMods

        if not configMod:
            configMod = seiscomp.datamodel.ConfigModule.Find(configID)
            if configMod is None:
                configMod = seiscomp.datamodel.ConfigModule.Create(configID)
                config.add(configMod)
            else:
                if configMod.name() != moduleName:
                    configMod.update()
                if not configMod.enabled():
                    configMod.update()

            configMod.setName(moduleName)
            configMod.setEnabled(True)
        else:
            if configMod.name() != moduleName:
                configMod.setName(moduleName)
                configMod.update()
            paramSet = seiscomp.datamodel.ParameterSet.Find(configMod.parameterSetID())
            if configMod.parameterSetID():
                configMod.setParameterSetID("")
                configMod.update()

            if paramSet is not None:
                paramSet.detach()

        stationConfigs = {}
        obsoleteStationConfigs = []

        for i in range(configMod.configStationCount()):
            cs = configMod.configStation(i)
            if (cs.networkCode(), cs.stationCode()) in self.stationSetups:
                stationConfigs[(cs.networkCode(), cs.stationCode())] = cs
            else:
                obsoleteStationConfigs.append(cs)

        for cs in obsoleteStationConfigs:
            print(
                f"- {configMod.name()}/{cs.networkCode()}/{cs.stationCode()} / obsolete "
                "station configuration",
                file=sys.stderr,
            )
            cs.detach()
        del obsoleteStationConfigs

        for staid, setups in list(self.stationSetups.items()):
            try:
                cs = stationConfigs[staid]
            except:
                cs = seiscomp.datamodel.ConfigStation.Find(
                    f"Config/{configMod.name()}/{staid[0]}/{staid[1]}"
                )
                if not cs:
                    cs = seiscomp.datamodel.ConfigStation.Create(
                        f"Config/{configMod.name()}/{staid[0]}/{staid[1]}"
                    )
                configMod.add(cs)
                cs.setNetworkCode(staid[0])
                cs.setStationCode(staid[1])
                cs.setEnabled(True)

                ci = seiscomp.datamodel.CreationInfo()
                ci.setCreationTime(seiscomp.core.Time.GMT())
                ci.setAgencyID(self.agencyID())
                ci.setAuthor(self.name())
                cs.setCreationInfo(ci)

            stationSetups = {}
            obsoleteSetups = []
            for i in range(cs.setupCount()):
                setup = cs.setup(i)
                if setup.name() in setups:
                    stationSetups[setup.name()] = setup
                else:
                    obsoleteSetups.append(setup)

            for s in obsoleteSetups:
                # Note: report the obsolete setup itself (s), not the loop
                # variable left over from the block above.
                print(
                    f"- {configMod.name()}/{cs.networkCode()}/{cs.stationCode()}/{s.name()} "
                    "/ obsolete station setup",
                    file=sys.stderr,
                )
                ps = seiscomp.datamodel.ParameterSet.Find(s.parameterSetID())
                if ps:
                    ps.detach()
                s.detach()
            del obsoleteSetups

            newParamSets = {}
            globalSet = ""
            for mod, params in list(setups.items()):
                try:
                    setup = stationSetups[mod]
                except:
                    setup = seiscomp.datamodel.Setup()
                    setup.setName(mod)
                    setup.setEnabled(True)
                    cs.add(setup)

                paramSet = seiscomp.datamodel.ParameterSet.Find(setup.parameterSetID())
                if not paramSet:
                    paramSet = seiscomp.datamodel.ParameterSet.Find(
                        "ParameterSet/%s/Station/%s/%s/%s"
                        % (
                            configMod.name(),
                            cs.networkCode(),
                            cs.stationCode(),
                            setup.name(),
                        )
                    )
                    if not paramSet:
                        paramSet = seiscomp.datamodel.ParameterSet.Create(
                            "ParameterSet/%s/Station/%s/%s/%s"
                            % (
                                configMod.name(),
                                cs.networkCode(),
                                cs.stationCode(),
                                setup.name(),
                            )
                        )
                        config.add(paramSet)
                        paramSet.setModuleID(configMod.publicID())
                        paramSet.setCreated(seiscomp.core.Time.GMT())
                        newParamSets[paramSet.publicID()] = 1
                    setup.setParameterSetID(paramSet.publicID())
                    if mod in stationSetups:
                        setup.update()
                elif paramSet.moduleID() != configMod.publicID():
                    paramSet.setModuleID(configMod.publicID())
                    paramSet.update()

                # Synchronize existing parameterset with the new parameters
                sync(paramSet, params)

                if setup.name() == "default":
                    globalSet = paramSet.publicID()

            for i in range(cs.setupCount()):
                setup = cs.setup(i)
                paramSet = seiscomp.datamodel.ParameterSet.Find(setup.parameterSetID())
                if not paramSet:
                    continue

                if paramSet.publicID() != globalSet and paramSet.baseID() != globalSet:
                    paramSet.setBaseID(globalSet)
                    if paramSet.publicID() not in newParamSets:
                        paramSet.update()

        # Collect unused ParameterSets
        usedSets = {}
        for i in range(config.configModuleCount()):
            configMod = config.configModule(i)
            for j in range(configMod.configStationCount()):
                cs = configMod.configStation(j)
                for k in range(cs.setupCount()):
                    setup = cs.setup(k)
                    collect(usedSets, setup.parameterSetID())

        # Delete unused ParameterSets
        i = 0
        while i < config.parameterSetCount():
            paramSet = config.parameterSet(i)
            if paramSet.publicID() not in usedSets:
                print(
                    f"- {paramSet.publicID()} / obsolete parameter set", file=sys.stderr
                )
                paramSet.detach()
            else:
                i = i + 1

        # Generate output file and exit if configured
        if self._outputFile is not None:
            ar = seiscomp.io.XMLArchive()
            if not ar.create(self._outputFile):
                print(
                    f"Failed to create output file: {self._outputFile}",
                    file=sys.stderr,
                )
                return False

            ar.setFormattedOutput(True)
            if self._createNotifier:
                nmsg = seiscomp.datamodel.Notifier.GetMessage(True)
                ar.writeObject(nmsg)
            else:
                ar.writeObject(config)
            ar.close()
            return True

        ncount = seiscomp.datamodel.Notifier.Size()
        if ncount > 0:
            print(f"+ synchronize {ncount} change(s)", file=sys.stderr)
        else:
            print("- database is already up-to-date", file=sys.stderr)
            return True

        cfgmsg = seiscomp.datamodel.ConfigSyncMessage(False)
        cfgmsg.setCreationInfo(seiscomp.datamodel.CreationInfo())
        cfgmsg.creationInfo().setCreationTime(seiscomp.core.Time.GMT())
        cfgmsg.creationInfo().setAuthor(self.author())
        cfgmsg.creationInfo().setAgencyID(self.agencyID())
        self.send(seiscomp.client.Protocol.STATUS_GROUP, cfgmsg)

        # Send messages in a batch of 100 notifiers to not exceed the
        # maximum allowed message size of ~300kb.
        msg = seiscomp.datamodel.NotifierMessage()
        nmsg = seiscomp.datamodel.Notifier.GetMessage(False)
        count = 0
        sys.stderr.write("\r + sending notifiers: %d%%" % (count * 100 / ncount))
        sys.stderr.flush()
        while nmsg:
            for o in nmsg:
                n = seiscomp.datamodel.Notifier.Cast(o)
                if n:
                    msg.attach(n)

            if msg.size() >= 100:
                count += msg.size()
                self.send("CONFIG", msg)
                msg.clear()
                sys.stderr.write(
                    "\r + sending notifiers: %d%%" % (count * 100 / ncount)
                )
                sys.stderr.flush()

            nmsg = seiscomp.datamodel.Notifier.GetMessage(False)

        if msg.size() > 0:
            count += msg.size()
            self.send("CONFIG", msg)
            msg.clear()
            sys.stderr.write("\r + sending notifiers: %d%%" % (count * 100 / ncount))
            sys.stderr.flush()

        sys.stderr.write("\n")

        # Notify about end of synchronization
        cfgmsg.creationInfo().setCreationTime(seiscomp.core.Time.GMT())
        cfgmsg.isFinished = True
        self.send(seiscomp.client.Protocol.STATUS_GROUP, cfgmsg)

        return True


def main():
    app = ConfigDBUpdater(len(sys.argv), sys.argv)
    return app()
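For reference, reading a file produced with 'bindings2cfg --key-dir ./etc/key -o config.xml' back into a Config object mirrors the write path above. A minimal sketch, assuming the readObject()/Cast() counterparts of the XMLArchive and datamodel calls already used in this module:

import seiscomp.datamodel
import seiscomp.io

ar = seiscomp.io.XMLArchive()
if not ar.open('config.xml'):      # file written by bindings2cfg -o
    raise IOError('cannot open config.xml')
obj = ar.readObject()
ar.close()

config = seiscomp.datamodel.Config.Cast(obj)
if config:
    print('config modules:', config.configModuleCount())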
1984
lib/python/seiscomp/client.py
Normal file
File diff suppressed because it is too large
857
lib/python/seiscomp/config.py
Normal file
@ -0,0 +1,857 @@
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 4.0.2
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.

from sys import version_info as _swig_python_version_info
if _swig_python_version_info < (2, 7, 0):
    raise RuntimeError("Python 2.7 or later required")

# Import the low-level C/C++ module
if __package__ or "." in __name__:
    from . import _config
else:
    import _config

try:
    import builtins as __builtin__
except ImportError:
    import __builtin__

def _swig_repr(self):
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)


def _swig_setattr_nondynamic_instance_variable(set):
    def set_instance_attr(self, name, value):
        if name == "thisown":
            self.this.own(value)
        elif name == "this":
            set(self, name, value)
        elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
            set(self, name, value)
        else:
            raise AttributeError("You cannot add instance attributes to %s" % self)
    return set_instance_attr


def _swig_setattr_nondynamic_class_variable(set):
    def set_class_attr(cls, name, value):
        if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
            set(cls, name, value)
        else:
            raise AttributeError("You cannot add class attributes to %s" % cls)
    return set_class_attr


def _swig_add_metaclass(metaclass):
    """Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
    def wrapper(cls):
        return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
    return wrapper


class _SwigNonDynamicMeta(type):
    """Meta class to enforce nondynamic attributes (no new attributes) for a class"""
    __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)


import weakref

class SwigPyIterator(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _config.delete_SwigPyIterator

    def value(self):
        return _config.SwigPyIterator_value(self)

    def incr(self, n=1):
        return _config.SwigPyIterator_incr(self, n)

    def decr(self, n=1):
        return _config.SwigPyIterator_decr(self, n)

    def distance(self, x):
        return _config.SwigPyIterator_distance(self, x)

    def equal(self, x):
        return _config.SwigPyIterator_equal(self, x)

    def copy(self):
        return _config.SwigPyIterator_copy(self)

    def next(self):
        return _config.SwigPyIterator_next(self)

    def __next__(self):
        return _config.SwigPyIterator___next__(self)

    def previous(self):
        return _config.SwigPyIterator_previous(self)

    def advance(self, n):
        return _config.SwigPyIterator_advance(self, n)

    def __eq__(self, x):
        return _config.SwigPyIterator___eq__(self, x)

    def __ne__(self, x):
        return _config.SwigPyIterator___ne__(self, x)

    def __iadd__(self, n):
        return _config.SwigPyIterator___iadd__(self, n)

    def __isub__(self, n):
        return _config.SwigPyIterator___isub__(self, n)

    def __add__(self, n):
        return _config.SwigPyIterator___add__(self, n)

    def __sub__(self, *args):
        return _config.SwigPyIterator___sub__(self, *args)
    def __iter__(self):
        return self

# Register SwigPyIterator in _config:
_config.SwigPyIterator_swigregister(SwigPyIterator)

ERROR = _config.ERROR
WARNING = _config.WARNING
INFO = _config.INFO
DEBUG = _config.DEBUG
class Logger(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr
    __swig_destroy__ = _config.delete_Logger

    def log(self, arg0, filename, line, msg):
        return _config.Logger_log(self, arg0, filename, line, msg)

    def __init__(self):
        if self.__class__ == Logger:
            _self = None
        else:
            _self = self
        _config.Logger_swiginit(self, _config.new_Logger(_self, ))
    def __disown__(self):
        self.this.disown()
        _config.disown_Logger(self)
        return weakref.proxy(self)

# Register Logger in _config:
_config.Logger_swigregister(Logger)

class Exception(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _config.Exception_swiginit(self, _config.new_Exception(*args))
    __swig_destroy__ = _config.delete_Exception

    def what(self):
        return _config.Exception_what(self)

# Register Exception in _config:
_config.Exception_swigregister(Exception)
cvar = _config.cvar

class OptionNotFoundException(Exception):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _config.OptionNotFoundException_swiginit(self, _config.new_OptionNotFoundException(*args))
    __swig_destroy__ = _config.delete_OptionNotFoundException

# Register OptionNotFoundException in _config:
_config.OptionNotFoundException_swigregister(OptionNotFoundException)

class TypeConversionException(Exception):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _config.TypeConversionException_swiginit(self, _config.new_TypeConversionException(*args))
    __swig_destroy__ = _config.delete_TypeConversionException

# Register TypeConversionException in _config:
_config.TypeConversionException_swigregister(TypeConversionException)

class SyntaxException(Exception):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _config.SyntaxException_swiginit(self, _config.new_SyntaxException(*args))
|
||||
__swig_destroy__ = _config.delete_SyntaxException
|
||||
|
||||
# Register SyntaxException in _config:
|
||||
_config.SyntaxException_swigregister(SyntaxException)
|
||||
|
||||
class CaseSensitivityException(Exception):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def __init__(self, *args):
|
||||
_config.CaseSensitivityException_swiginit(self, _config.new_CaseSensitivityException(*args))
|
||||
__swig_destroy__ = _config.delete_CaseSensitivityException
|
||||
|
||||
# Register CaseSensitivityException in _config:
|
||||
_config.CaseSensitivityException_swigregister(CaseSensitivityException)
|
||||
|
||||
class Symbol(object):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def __init__(self, *args):
|
||||
_config.Symbol_swiginit(self, _config.new_Symbol(*args))
|
||||
|
||||
def set(self, name, ns, values, uri, comment, stage=-1):
|
||||
return _config.Symbol_set(self, name, ns, values, uri, comment, stage)
|
||||
|
||||
def __eq__(self, symbol):
|
||||
return _config.Symbol___eq__(self, symbol)
|
||||
|
||||
def toString(self):
|
||||
return _config.Symbol_toString(self)
|
||||
name = property(_config.Symbol_name_get, _config.Symbol_name_set)
|
||||
ns = property(_config.Symbol_ns_get, _config.Symbol_ns_set)
|
||||
content = property(_config.Symbol_content_get, _config.Symbol_content_set)
|
||||
values = property(_config.Symbol_values_get, _config.Symbol_values_set)
|
||||
uri = property(_config.Symbol_uri_get, _config.Symbol_uri_set)
|
||||
comment = property(_config.Symbol_comment_get, _config.Symbol_comment_set)
|
||||
stage = property(_config.Symbol_stage_get, _config.Symbol_stage_set)
|
||||
line = property(_config.Symbol_line_get, _config.Symbol_line_set)
|
||||
__swig_destroy__ = _config.delete_Symbol
|
||||
|
||||
# Register Symbol in _config:
|
||||
_config.Symbol_swigregister(Symbol)
|
||||
|
||||
class SymbolTable(object):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def __init__(self):
|
||||
_config.SymbolTable_swiginit(self, _config.new_SymbolTable())
|
||||
|
||||
def setCaseSensitivityCheck(self, arg2):
|
||||
return _config.SymbolTable_setCaseSensitivityCheck(self, arg2)
|
||||
|
||||
def setLogger(self, arg2):
|
||||
return _config.SymbolTable_setLogger(self, arg2)
|
||||
|
||||
def logger(self):
|
||||
return _config.SymbolTable_logger(self)
|
||||
|
||||
def add(self, *args):
|
||||
return _config.SymbolTable_add(self, *args)
|
||||
|
||||
def get(self, *args):
|
||||
return _config.SymbolTable_get(self, *args)
|
||||
|
||||
def remove(self, name):
|
||||
return _config.SymbolTable_remove(self, name)
|
||||
|
||||
def incrementObjectCount(self):
|
||||
return _config.SymbolTable_incrementObjectCount(self)
|
||||
|
||||
def decrementObjectCount(self):
|
||||
return _config.SymbolTable_decrementObjectCount(self)
|
||||
|
||||
def objectCount(self):
|
||||
return _config.SymbolTable_objectCount(self)
|
||||
|
||||
def toString(self):
|
||||
return _config.SymbolTable_toString(self)
|
||||
|
||||
def hasFileBeenIncluded(self, fileName):
|
||||
return _config.SymbolTable_hasFileBeenIncluded(self, fileName)
|
||||
|
||||
def addToIncludedFiles(self, fileName):
|
||||
return _config.SymbolTable_addToIncludedFiles(self, fileName)
|
||||
|
||||
def includesBegin(self):
|
||||
return _config.SymbolTable_includesBegin(self)
|
||||
|
||||
def includesEnd(self):
|
||||
return _config.SymbolTable_includesEnd(self)
|
||||
|
||||
def begin(self):
|
||||
return _config.SymbolTable_begin(self)
|
||||
|
||||
def end(self):
|
||||
return _config.SymbolTable_end(self)
|
||||
__swig_destroy__ = _config.delete_SymbolTable
|
||||
|
||||
# Register SymbolTable in _config:
|
||||
_config.SymbolTable_swigregister(SymbolTable)
|
||||
|
||||
class Config(object):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def __init__(self):
|
||||
_config.Config_swiginit(self, _config.new_Config())
|
||||
__swig_destroy__ = _config.delete_Config
|
||||
|
||||
def setCaseSensitivityCheck(self, arg2):
|
||||
return _config.Config_setCaseSensitivityCheck(self, arg2)
|
||||
|
||||
def readConfig(self, file, stage=-1, raw=False):
|
||||
return _config.Config_readConfig(self, file, stage, raw)
|
||||
|
||||
def writeConfig(self, *args):
|
||||
return _config.Config_writeConfig(self, *args)
|
||||
|
||||
def setLogger(self, logger):
|
||||
return _config.Config_setLogger(self, logger)
|
||||
|
||||
def symbolsToString(self):
|
||||
return _config.Config_symbolsToString(self)
|
||||
|
||||
def names(self):
|
||||
return _config.Config_names(self)
|
||||
|
||||
def visitedFilesToString(self):
|
||||
return _config.Config_visitedFilesToString(self)
|
||||
|
||||
def getInt(self, *args):
|
||||
return _config.Config_getInt(self, *args)
|
||||
|
||||
def setInt(self, name, value):
|
||||
return _config.Config_setInt(self, name, value)
|
||||
|
||||
def getDouble(self, *args):
|
||||
return _config.Config_getDouble(self, *args)
|
||||
|
||||
def setDouble(self, name, value):
|
||||
return _config.Config_setDouble(self, name, value)
|
||||
|
||||
def getBool(self, *args):
|
||||
return _config.Config_getBool(self, *args)
|
||||
|
||||
def setBool(self, name, value):
|
||||
return _config.Config_setBool(self, name, value)
|
||||
|
||||
def getString(self, *args):
|
||||
return _config.Config_getString(self, *args)
|
||||
|
||||
def setString(self, name, value):
|
||||
return _config.Config_setString(self, name, value)
|
||||
|
||||
def remove(self, name):
|
||||
return _config.Config_remove(self, name)
|
||||
|
||||
def getInts(self, *args):
|
||||
return _config.Config_getInts(self, *args)
|
||||
|
||||
def setInts(self, name, values):
|
||||
return _config.Config_setInts(self, name, values)
|
||||
|
||||
def getDoubles(self, *args):
|
||||
return _config.Config_getDoubles(self, *args)
|
||||
|
||||
def setDoubles(self, name, values):
|
||||
return _config.Config_setDoubles(self, name, values)
|
||||
|
||||
def getBools(self, *args):
|
||||
return _config.Config_getBools(self, *args)
|
||||
|
||||
def setBools(self, name, values):
|
||||
return _config.Config_setBools(self, name, values)
|
||||
|
||||
def getStrings(self, *args):
|
||||
return _config.Config_getStrings(self, *args)
|
||||
|
||||
def setStrings(self, name, values):
|
||||
return _config.Config_setStrings(self, name, values)
|
||||
|
||||
def symbolTable(self):
|
||||
return _config.Config_symbolTable(self)
|
||||
|
||||
def eval(self, rvalue, result, resolveReferences=True, errmsg=None):
|
||||
return _config.Config_eval(self, rvalue, result, resolveReferences, errmsg)
|
||||
|
||||
@staticmethod
|
||||
def Eval(rvalue, result, resolveReferences=True, symtab=None, errmsg=None):
|
||||
return _config.Config_Eval(rvalue, result, resolveReferences, symtab, errmsg)
|
||||
|
||||
@staticmethod
|
||||
def writeValues(os, symbol, multilineLists=False):
|
||||
return _config.Config_writeValues(os, symbol, multilineLists)
|
||||
|
||||
@staticmethod
|
||||
def writeContent(os, symbol, multilineLists=False):
|
||||
return _config.Config_writeContent(os, symbol, multilineLists)
|
||||
|
||||
@staticmethod
|
||||
def writeSymbol(os, symbol, multilineLists=False):
|
||||
return _config.Config_writeSymbol(os, symbol, multilineLists)
|
||||
|
||||
@staticmethod
|
||||
def escapeIdentifier(arg1):
|
||||
return _config.Config_escapeIdentifier(arg1)
|
||||
|
||||
def trackVariables(self, enabled):
|
||||
return _config.Config_trackVariables(self, enabled)
|
||||
|
||||
def getVariables(self):
|
||||
return _config.Config_getVariables(self)
|
||||
|
||||
def escape(self, arg2):
|
||||
return _config.Config_escape(self, arg2)
|
||||
|
||||
# Register Config in _config:
|
||||
_config.Config_swigregister(Config)
|
||||
|
||||
def Config_Eval(rvalue, result, resolveReferences=True, symtab=None, errmsg=None):
|
||||
return _config.Config_Eval(rvalue, result, resolveReferences, symtab, errmsg)
|
||||
|
||||
def Config_writeValues(os, symbol, multilineLists=False):
|
||||
return _config.Config_writeValues(os, symbol, multilineLists)
|
||||
|
||||
def Config_writeContent(os, symbol, multilineLists=False):
|
||||
return _config.Config_writeContent(os, symbol, multilineLists)
|
||||
|
||||
def Config_writeSymbol(os, symbol, multilineLists=False):
|
||||
return _config.Config_writeSymbol(os, symbol, multilineLists)
|
||||
|
||||
def Config_escapeIdentifier(arg1):
|
||||
return _config.Config_escapeIdentifier(arg1)
|
||||
|
||||
class VectorStr(object):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def iterator(self):
|
||||
return _config.VectorStr_iterator(self)
|
||||
def __iter__(self):
|
||||
return self.iterator()
|
||||
|
||||
def __nonzero__(self):
|
||||
return _config.VectorStr___nonzero__(self)
|
||||
|
||||
def __bool__(self):
|
||||
return _config.VectorStr___bool__(self)
|
||||
|
||||
def __len__(self):
|
||||
return _config.VectorStr___len__(self)
|
||||
|
||||
def __getslice__(self, i, j):
|
||||
return _config.VectorStr___getslice__(self, i, j)
|
||||
|
||||
def __setslice__(self, *args):
|
||||
return _config.VectorStr___setslice__(self, *args)
|
||||
|
||||
def __delslice__(self, i, j):
|
||||
return _config.VectorStr___delslice__(self, i, j)
|
||||
|
||||
def __delitem__(self, *args):
|
||||
return _config.VectorStr___delitem__(self, *args)
|
||||
|
||||
def __getitem__(self, *args):
|
||||
return _config.VectorStr___getitem__(self, *args)
|
||||
|
||||
def __setitem__(self, *args):
|
||||
return _config.VectorStr___setitem__(self, *args)
|
||||
|
||||
def pop(self):
|
||||
return _config.VectorStr_pop(self)
|
||||
|
||||
def append(self, x):
|
||||
return _config.VectorStr_append(self, x)
|
||||
|
||||
def empty(self):
|
||||
return _config.VectorStr_empty(self)
|
||||
|
||||
def size(self):
|
||||
return _config.VectorStr_size(self)
|
||||
|
||||
def swap(self, v):
|
||||
return _config.VectorStr_swap(self, v)
|
||||
|
||||
def begin(self):
|
||||
return _config.VectorStr_begin(self)
|
||||
|
||||
def end(self):
|
||||
return _config.VectorStr_end(self)
|
||||
|
||||
def rbegin(self):
|
||||
return _config.VectorStr_rbegin(self)
|
||||
|
||||
def rend(self):
|
||||
return _config.VectorStr_rend(self)
|
||||
|
||||
def clear(self):
|
||||
return _config.VectorStr_clear(self)
|
||||
|
||||
def get_allocator(self):
|
||||
return _config.VectorStr_get_allocator(self)
|
||||
|
||||
def pop_back(self):
|
||||
return _config.VectorStr_pop_back(self)
|
||||
|
||||
def erase(self, *args):
|
||||
return _config.VectorStr_erase(self, *args)
|
||||
|
||||
def __init__(self, *args):
|
||||
_config.VectorStr_swiginit(self, _config.new_VectorStr(*args))
|
||||
|
||||
def push_back(self, x):
|
||||
return _config.VectorStr_push_back(self, x)
|
||||
|
||||
def front(self):
|
||||
return _config.VectorStr_front(self)
|
||||
|
||||
def back(self):
|
||||
return _config.VectorStr_back(self)
|
||||
|
||||
def assign(self, n, x):
|
||||
return _config.VectorStr_assign(self, n, x)
|
||||
|
||||
def resize(self, *args):
|
||||
return _config.VectorStr_resize(self, *args)
|
||||
|
||||
def insert(self, *args):
|
||||
return _config.VectorStr_insert(self, *args)
|
||||
|
||||
def reserve(self, n):
|
||||
return _config.VectorStr_reserve(self, n)
|
||||
|
||||
def capacity(self):
|
||||
return _config.VectorStr_capacity(self)
|
||||
__swig_destroy__ = _config.delete_VectorStr
|
||||
|
||||
# Register VectorStr in _config:
|
||||
_config.VectorStr_swigregister(VectorStr)
|
||||
|
||||
class VectorInt(object):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def iterator(self):
|
||||
return _config.VectorInt_iterator(self)
|
||||
def __iter__(self):
|
||||
return self.iterator()
|
||||
|
||||
def __nonzero__(self):
|
||||
return _config.VectorInt___nonzero__(self)
|
||||
|
||||
def __bool__(self):
|
||||
return _config.VectorInt___bool__(self)
|
||||
|
||||
def __len__(self):
|
||||
return _config.VectorInt___len__(self)
|
||||
|
||||
def __getslice__(self, i, j):
|
||||
return _config.VectorInt___getslice__(self, i, j)
|
||||
|
||||
def __setslice__(self, *args):
|
||||
return _config.VectorInt___setslice__(self, *args)
|
||||
|
||||
def __delslice__(self, i, j):
|
||||
return _config.VectorInt___delslice__(self, i, j)
|
||||
|
||||
def __delitem__(self, *args):
|
||||
return _config.VectorInt___delitem__(self, *args)
|
||||
|
||||
def __getitem__(self, *args):
|
||||
return _config.VectorInt___getitem__(self, *args)
|
||||
|
||||
def __setitem__(self, *args):
|
||||
return _config.VectorInt___setitem__(self, *args)
|
||||
|
||||
def pop(self):
|
||||
return _config.VectorInt_pop(self)
|
||||
|
||||
def append(self, x):
|
||||
return _config.VectorInt_append(self, x)
|
||||
|
||||
def empty(self):
|
||||
return _config.VectorInt_empty(self)
|
||||
|
||||
def size(self):
|
||||
return _config.VectorInt_size(self)
|
||||
|
||||
def swap(self, v):
|
||||
return _config.VectorInt_swap(self, v)
|
||||
|
||||
def begin(self):
|
||||
return _config.VectorInt_begin(self)
|
||||
|
||||
def end(self):
|
||||
return _config.VectorInt_end(self)
|
||||
|
||||
def rbegin(self):
|
||||
return _config.VectorInt_rbegin(self)
|
||||
|
||||
def rend(self):
|
||||
return _config.VectorInt_rend(self)
|
||||
|
||||
def clear(self):
|
||||
return _config.VectorInt_clear(self)
|
||||
|
||||
def get_allocator(self):
|
||||
return _config.VectorInt_get_allocator(self)
|
||||
|
||||
def pop_back(self):
|
||||
return _config.VectorInt_pop_back(self)
|
||||
|
||||
def erase(self, *args):
|
||||
return _config.VectorInt_erase(self, *args)
|
||||
|
||||
def __init__(self, *args):
|
||||
_config.VectorInt_swiginit(self, _config.new_VectorInt(*args))
|
||||
|
||||
def push_back(self, x):
|
||||
return _config.VectorInt_push_back(self, x)
|
||||
|
||||
def front(self):
|
||||
return _config.VectorInt_front(self)
|
||||
|
||||
def back(self):
|
||||
return _config.VectorInt_back(self)
|
||||
|
||||
def assign(self, n, x):
|
||||
return _config.VectorInt_assign(self, n, x)
|
||||
|
||||
def resize(self, *args):
|
||||
return _config.VectorInt_resize(self, *args)
|
||||
|
||||
def insert(self, *args):
|
||||
return _config.VectorInt_insert(self, *args)
|
||||
|
||||
def reserve(self, n):
|
||||
return _config.VectorInt_reserve(self, n)
|
||||
|
||||
def capacity(self):
|
||||
return _config.VectorInt_capacity(self)
|
||||
__swig_destroy__ = _config.delete_VectorInt
|
||||
|
||||
# Register VectorInt in _config:
|
||||
_config.VectorInt_swigregister(VectorInt)
|
||||
|
||||
class VectorDouble(object):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def iterator(self):
|
||||
return _config.VectorDouble_iterator(self)
|
||||
def __iter__(self):
|
||||
return self.iterator()
|
||||
|
||||
def __nonzero__(self):
|
||||
return _config.VectorDouble___nonzero__(self)
|
||||
|
||||
def __bool__(self):
|
||||
return _config.VectorDouble___bool__(self)
|
||||
|
||||
def __len__(self):
|
||||
return _config.VectorDouble___len__(self)
|
||||
|
||||
def __getslice__(self, i, j):
|
||||
return _config.VectorDouble___getslice__(self, i, j)
|
||||
|
||||
def __setslice__(self, *args):
|
||||
return _config.VectorDouble___setslice__(self, *args)
|
||||
|
||||
def __delslice__(self, i, j):
|
||||
return _config.VectorDouble___delslice__(self, i, j)
|
||||
|
||||
def __delitem__(self, *args):
|
||||
return _config.VectorDouble___delitem__(self, *args)
|
||||
|
||||
def __getitem__(self, *args):
|
||||
return _config.VectorDouble___getitem__(self, *args)
|
||||
|
||||
def __setitem__(self, *args):
|
||||
return _config.VectorDouble___setitem__(self, *args)
|
||||
|
||||
def pop(self):
|
||||
return _config.VectorDouble_pop(self)
|
||||
|
||||
def append(self, x):
|
||||
return _config.VectorDouble_append(self, x)
|
||||
|
||||
def empty(self):
|
||||
return _config.VectorDouble_empty(self)
|
||||
|
||||
def size(self):
|
||||
return _config.VectorDouble_size(self)
|
||||
|
||||
def swap(self, v):
|
||||
return _config.VectorDouble_swap(self, v)
|
||||
|
||||
def begin(self):
|
||||
return _config.VectorDouble_begin(self)
|
||||
|
||||
def end(self):
|
||||
return _config.VectorDouble_end(self)
|
||||
|
||||
def rbegin(self):
|
||||
return _config.VectorDouble_rbegin(self)
|
||||
|
||||
def rend(self):
|
||||
return _config.VectorDouble_rend(self)
|
||||
|
||||
def clear(self):
|
||||
return _config.VectorDouble_clear(self)
|
||||
|
||||
def get_allocator(self):
|
||||
return _config.VectorDouble_get_allocator(self)
|
||||
|
||||
def pop_back(self):
|
||||
return _config.VectorDouble_pop_back(self)
|
||||
|
||||
def erase(self, *args):
|
||||
return _config.VectorDouble_erase(self, *args)
|
||||
|
||||
def __init__(self, *args):
|
||||
_config.VectorDouble_swiginit(self, _config.new_VectorDouble(*args))
|
||||
|
||||
def push_back(self, x):
|
||||
return _config.VectorDouble_push_back(self, x)
|
||||
|
||||
def front(self):
|
||||
return _config.VectorDouble_front(self)
|
||||
|
||||
def back(self):
|
||||
return _config.VectorDouble_back(self)
|
||||
|
||||
def assign(self, n, x):
|
||||
return _config.VectorDouble_assign(self, n, x)
|
||||
|
||||
def resize(self, *args):
|
||||
return _config.VectorDouble_resize(self, *args)
|
||||
|
||||
def insert(self, *args):
|
||||
return _config.VectorDouble_insert(self, *args)
|
||||
|
||||
def reserve(self, n):
|
||||
return _config.VectorDouble_reserve(self, n)
|
||||
|
||||
def capacity(self):
|
||||
return _config.VectorDouble_capacity(self)
|
||||
__swig_destroy__ = _config.delete_VectorDouble
|
||||
|
||||
# Register VectorDouble in _config:
|
||||
_config.VectorDouble_swigregister(VectorDouble)
|
||||
|
||||
class VectorBool(object):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def iterator(self):
|
||||
return _config.VectorBool_iterator(self)
|
||||
def __iter__(self):
|
||||
return self.iterator()
|
||||
|
||||
def __nonzero__(self):
|
||||
return _config.VectorBool___nonzero__(self)
|
||||
|
||||
def __bool__(self):
|
||||
return _config.VectorBool___bool__(self)
|
||||
|
||||
def __len__(self):
|
||||
return _config.VectorBool___len__(self)
|
||||
|
||||
def __getslice__(self, i, j):
|
||||
return _config.VectorBool___getslice__(self, i, j)
|
||||
|
||||
def __setslice__(self, *args):
|
||||
return _config.VectorBool___setslice__(self, *args)
|
||||
|
||||
def __delslice__(self, i, j):
|
||||
return _config.VectorBool___delslice__(self, i, j)
|
||||
|
||||
def __delitem__(self, *args):
|
||||
return _config.VectorBool___delitem__(self, *args)
|
||||
|
||||
def __getitem__(self, *args):
|
||||
return _config.VectorBool___getitem__(self, *args)
|
||||
|
||||
def __setitem__(self, *args):
|
||||
return _config.VectorBool___setitem__(self, *args)
|
||||
|
||||
def pop(self):
|
||||
return _config.VectorBool_pop(self)
|
||||
|
||||
def append(self, x):
|
||||
return _config.VectorBool_append(self, x)
|
||||
|
||||
def empty(self):
|
||||
return _config.VectorBool_empty(self)
|
||||
|
||||
def size(self):
|
||||
return _config.VectorBool_size(self)
|
||||
|
||||
def swap(self, v):
|
||||
return _config.VectorBool_swap(self, v)
|
||||
|
||||
def begin(self):
|
||||
return _config.VectorBool_begin(self)
|
||||
|
||||
def end(self):
|
||||
return _config.VectorBool_end(self)
|
||||
|
||||
def rbegin(self):
|
||||
return _config.VectorBool_rbegin(self)
|
||||
|
||||
def rend(self):
|
||||
return _config.VectorBool_rend(self)
|
||||
|
||||
def clear(self):
|
||||
return _config.VectorBool_clear(self)
|
||||
|
||||
def get_allocator(self):
|
||||
return _config.VectorBool_get_allocator(self)
|
||||
|
||||
def pop_back(self):
|
||||
return _config.VectorBool_pop_back(self)
|
||||
|
||||
def erase(self, *args):
|
||||
return _config.VectorBool_erase(self, *args)
|
||||
|
||||
def __init__(self, *args):
|
||||
_config.VectorBool_swiginit(self, _config.new_VectorBool(*args))
|
||||
|
||||
def push_back(self, x):
|
||||
return _config.VectorBool_push_back(self, x)
|
||||
|
||||
def front(self):
|
||||
return _config.VectorBool_front(self)
|
||||
|
||||
def back(self):
|
||||
return _config.VectorBool_back(self)
|
||||
|
||||
def assign(self, n, x):
|
||||
return _config.VectorBool_assign(self, n, x)
|
||||
|
||||
def resize(self, *args):
|
||||
return _config.VectorBool_resize(self, *args)
|
||||
|
||||
def insert(self, *args):
|
||||
return _config.VectorBool_insert(self, *args)
|
||||
|
||||
def reserve(self, n):
|
||||
return _config.VectorBool_reserve(self, n)
|
||||
|
||||
def capacity(self):
|
||||
return _config.VectorBool_capacity(self)
|
||||
__swig_destroy__ = _config.delete_VectorBool
|
||||
|
||||
# Register VectorBool in _config:
|
||||
_config.VectorBool_swigregister(VectorBool)
|
||||
|
||||
|
||||
|
||||
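
# [Editor's sketch, not part of the commit] Minimal usage of the generated
# wrapper above; assumes the compiled _config module is importable and that
# "global.cfg" is a readable SeisComP configuration file (both assumptions):

from seiscomp import config

cfg = config.Config()
if cfg.readConfig("global.cfg"):
    # names() yields every known parameter; values come back as VectorStr
    for name in cfg.names():
        print(name, list(cfg.getStrings(name)))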
2769
lib/python/seiscomp/core.py
Normal file
File diff suppressed because it is too large
23447
lib/python/seiscomp/datamodel/__init__.py
Normal file
File diff suppressed because it is too large
BIN
lib/python/seiscomp/datamodel/_datamodel.so
Normal file
Binary file not shown.
0
lib/python/seiscomp/fdsnws/__init__.py
Normal file
85
lib/python/seiscomp/fdsnws/authresource.py
Normal file
@ -0,0 +1,85 @@
################################################################################
# Copyright (C) 2013-2014 by gempa GmbH
#
# HTTP -- Utility methods which generate HTTP result strings
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################

import base64
import datetime
import hashlib
import json
import time
import dateutil.parser
import dateutil.tz

from twisted.web import http

import gnupg

import seiscomp.logging

from .utils import accessLog, u_str

from .http import BaseResource


################################################################################
class AuthResource(BaseResource):
    isLeaf = True

    def __init__(self, version, gnupghome, userdb):
        super().__init__(version)

        self.__gpg = gnupg.GPG(gnupghome=gnupghome)
        self.__userdb = userdb

    # ---------------------------------------------------------------------------
    def render_POST(self, request):
        request.setHeader("Content-Type", "text/plain; charset=utf-8")

        try:
            verified = self.__gpg.decrypt(request.content.getvalue())

        except OSError as e:
            msg = "gpg decrypt error"
            seiscomp.logging.warning(f"{msg}: {e}")
            return self.renderErrorPage(request, http.INTERNAL_SERVER_ERROR, msg)

        except Exception as e:
            msg = "invalid token"
            seiscomp.logging.warning(f"{msg}: {e}")
            return self.renderErrorPage(request, http.BAD_REQUEST, msg)

        if verified.trust_level is None or verified.trust_level < verified.TRUST_FULLY:
            msg = "token has invalid signature"
            seiscomp.logging.warning(msg)
            return self.renderErrorPage(request, http.BAD_REQUEST, msg)

        try:
            attributes = json.loads(u_str(verified.data))
            td = dateutil.parser.parse(
                attributes["valid_until"]
            ) - datetime.datetime.now(dateutil.tz.tzutc())
            lifetime = td.seconds + td.days * 24 * 3600

        except Exception as e:
            msg = "token has invalid validity"
            seiscomp.logging.warning(f"{msg}: {e}")
            return self.renderErrorPage(request, http.BAD_REQUEST, msg)

        if lifetime <= 0:
            msg = "token is expired"
            seiscomp.logging.warning(msg)
            return self.renderErrorPage(request, http.BAD_REQUEST, msg)

        userid = base64.urlsafe_b64encode(hashlib.sha256(verified.data).digest()[:18])
        password = self.__userdb.addUser(
            u_str(userid),
            attributes,
            time.time() + min(lifetime, 24 * 3600),
            u_str(verified.data),
        )
        accessLog(request, None, http.OK, len(userid) + len(password) + 1, None)
        return userid + b":" + password
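
# [Editor's sketch, not part of the commit] Client side of the token exchange
# implemented above: POST a PGP-signed token and receive "userid:password"
# credentials for later digest-authenticated requests. The host, port, URL
# path and token file name are assumptions, not taken from the source:

import requests

with open("token.asc", "rb") as f:
    resp = requests.post("http://localhost:8080/fdsnws/auth/1/query", data=f.read())
resp.raise_for_status()
userid, password = resp.text.split(":", 1)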
1442
lib/python/seiscomp/fdsnws/availability.py
Normal file
File diff suppressed because it is too large
796
lib/python/seiscomp/fdsnws/dataselect.py
Normal file
@ -0,0 +1,796 @@
################################################################################
# Copyright (C) 2013-2014 by gempa GmbH
#
# FDSNDataSelect -- Implements the fdsnws-dataselect Web service, see
#   http://www.fdsn.org/webservices/
#
# Feature notes:
#   - 'quality' request parameter not implemented (information not available in
#     SeisComP)
#   - 'minimumlength' parameter is not implemented
#   - 'longestonly' parameter is not implemented
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################

import time

from io import BytesIO

import dateutil.parser

from twisted.cred import portal
from twisted.web import http, resource, server
from twisted.internet import interfaces, reactor

from zope.interface import implementer

from seiscomp import logging, mseedlite

from seiscomp.client import Application
from seiscomp.core import Array, Record, Time
from seiscomp.io import RecordInput, RecordStream

from .http import HTTP, BaseResource
from .request import RequestOptions
from . import utils

from .reqtrack import RequestTrackerDB
from .fastsds import SDS

VERSION = "1.1.3"

################################################################################


class _DataSelectRequestOptions(RequestOptions):
    MinTime = Time(0, 1)

    PQuality = ["quality"]
    PMinimumLength = ["minimumlength"]
    PLongestOnly = ["longestonly"]

    QualityValues = ["B", "D", "M", "Q", "R"]
    OutputFormats = ["miniseed", "mseed"]

    POSTParams = RequestOptions.POSTParams + PQuality + PMinimumLength + PLongestOnly
    GETParams = RequestOptions.GETParams + POSTParams

    # ---------------------------------------------------------------------------
    def __init__(self):
        super().__init__()

        self.service = "fdsnws-dataselect"

        self.quality = self.QualityValues[0]
        self.minimumLength = None
        self.longestOnly = None

    # ---------------------------------------------------------------------------
    def _checkTimes(self, realtimeGap):
        maxEndTime = Time(self.accessTime)
        if realtimeGap is not None:
            maxEndTime -= Time(realtimeGap, 0)

        for ro in self.streams:
            # create time if none was specified
            if ro.time is None:
                ro.time = RequestOptions.Time()
            # restrict time to 1970 - now
            if ro.time.start is None or ro.time.start < self.MinTime:
                ro.time.start = self.MinTime
            if ro.time.end is None or ro.time.end > maxEndTime:
                ro.time.end = maxEndTime

        # remove items with start time >= end time
        self.streams = [x for x in self.streams if x.time.start < x.time.end]

    # ---------------------------------------------------------------------------
    def parse(self):
        # quality (optional), currently not supported
        key, value = self.getFirstValue(self.PQuality)
        if value is not None:
            value = value.upper()
            if value in self.QualityValues:
                self.quality = value
            else:
                self.raiseValueError(key)

        # minimumlength (optional), currently not supported
        self.minimumLength = self.parseFloat(self.PMinimumLength, 0)

        # longestonly (optional), currently not supported
        self.longestOnly = self.parseBool(self.PLongestOnly)

        # generic parameters
        self.parseTime()
        self.parseChannel()
        self.parseOutput()


################################################################################
class _MyRecordStream:
    def __init__(self, url, trackerList, bufferSize):
        self.__url = url
        self.__trackerList = trackerList
        self.__bufferSize = bufferSize
        self.__tw = []

    def addStream(self, net, sta, loc, cha, startt, endt, restricted, archNet):
        self.__tw.append((net, sta, loc, cha, startt, endt, restricted, archNet))

    @staticmethod
    def __override_network(data, net):
        inp = BytesIO(data)
        out = BytesIO()

        for rec in mseedlite.Input(inp):
            rec.net = net
            rec_len_exp = 9

            while (1 << rec_len_exp) < rec.size:
                rec_len_exp += 1

            rec.write(out, rec_len_exp)

        return out.getvalue()

    def input(self):
        fastsdsPrefix = "fastsds://"

        if self.__url.startswith(fastsdsPrefix):
            fastsds = SDS(self.__url[len(fastsdsPrefix) :])

        else:
            fastsds = None

        for net, sta, loc, cha, startt, endt, restricted, archNet in self.__tw:
            if not archNet:
                archNet = net

            size = 0

            if fastsds:
                start = dateutil.parser.parse(startt.iso()).replace(tzinfo=None)
                end = dateutil.parser.parse(endt.iso()).replace(tzinfo=None)

                for data in fastsds.getRawBytes(
                    start, end, archNet, sta, loc, cha, self.__bufferSize
                ):
                    size += len(data)

                    if archNet == net:
                        yield data

                    else:
                        try:
                            yield self.__override_network(data, net)

                        except Exception as e:
                            logging.error(f"could not override network code: {e}")

            else:
                rs = RecordStream.Open(self.__url)

                if rs is None:
                    logging.error("could not open record stream")
                    break

                rs.addStream(archNet, sta, loc, cha, startt, endt)
                rsInput = RecordInput(rs, Array.INT, Record.SAVE_RAW)
                eof = False

                while not eof:
                    data = b""

                    while len(data) < self.__bufferSize:
                        try:
                            rec = rsInput.next()

                        except Exception as e:
                            logging.error(str(e))
                            eof = True
                            break

                        if rec is None:
                            eof = True
                            break

                        data += rec.raw().str()

                    if data:
                        size += len(data)

                        if archNet == net:
                            yield data

                        else:
                            try:
                                yield self.__override_network(data, net)

                            except Exception as e:
                                logging.error(f"could not override network code: {e}")

            for tracker in self.__trackerList:
                net_class = "t" if net[0] in "0123456789XYZ" else "p"

                if size == 0:
                    tracker.line_status(
                        startt,
                        endt,
                        net,
                        sta,
                        cha,
                        loc,
                        restricted,
                        net_class,
                        True,
                        [],
                        "fdsnws",
                        "NODATA",
                        0,
                        "",
                    )

                else:
                    tracker.line_status(
                        startt,
                        endt,
                        net,
                        sta,
                        cha,
                        loc,
                        restricted,
                        net_class,
                        True,
                        [],
                        "fdsnws",
                        "OK",
                        size,
                        "",
                    )


################################################################################
@implementer(interfaces.IPushProducer)
class _WaveformProducer:
    def __init__(self, req, ro, rs, fileName, trackerList):
        self.req = req
        self.ro = ro
        self.it = rs.input()

        self.fileName = fileName
        self.written = 0

        self.trackerList = trackerList
        self.paused = False
        self.stopped = False
        self.running = False

    def _flush(self, data):
        if self.stopped:
            return

        if not self.paused:
            reactor.callInThread(self._collectData)

        else:
            self.running = False

        if self.written == 0:
            self.req.setHeader("Content-Type", "application/vnd.fdsn.mseed")
            self.req.setHeader(
                "Content-Disposition", f"attachment; filename={self.fileName}"
            )

        self.req.write(data)
        self.written += len(data)

    def _finish(self):
        if self.stopped:
            return

        if self.written == 0:
            msg = "no waveform data found"
            errorpage = HTTP.renderErrorPage(
                self.req, http.NO_CONTENT, msg, VERSION, self.ro
            )

            if errorpage:
                self.req.write(errorpage)

            for tracker in self.trackerList:
                tracker.volume_status("fdsnws", "NODATA", 0, "")
                tracker.request_status("END", "")

        else:
            logging.debug(
                f"{self.ro.service}: returned {self.written} bytes of mseed data"
            )
            utils.accessLog(self.req, self.ro, http.OK, self.written, None)

            for tracker in self.trackerList:
                tracker.volume_status("fdsnws", "OK", self.written, "")
                tracker.request_status("END", "")

        self.req.unregisterProducer()
        self.req.finish()

    def _collectData(self):
        try:
            reactor.callFromThread(self._flush, next(self.it))

        except StopIteration:
            reactor.callFromThread(self._finish)

    def pauseProducing(self):
        self.paused = True

    def resumeProducing(self):
        self.paused = False

        if not self.running:
            self.running = True
            reactor.callInThread(self._collectData)

    def stopProducing(self):
        self.stopped = True

        logging.debug(
            f"{self.ro.service}: returned {self.written} bytes of mseed data (not "
            "completed)"
        )
        utils.accessLog(self.req, self.ro, http.OK, self.written, "not completed")

        for tracker in self.trackerList:
            tracker.volume_status("fdsnws", "ERROR", self.written, "")
            tracker.request_status("END", "")

        self.req.unregisterProducer()
        self.req.finish()


################################################################################
@implementer(portal.IRealm)
class FDSNDataSelectRealm:
    # ---------------------------------------------------------------------------
    def __init__(self, inv, bufferSize, access):
        self.__inv = inv
        self.__bufferSize = bufferSize
        self.__access = access

    # ---------------------------------------------------------------------------
    def requestAvatar(self, avatarId, _mind, *interfaces_):
        if resource.IResource in interfaces_:
            return (
                resource.IResource,
                FDSNDataSelect(
                    self.__inv,
                    self.__bufferSize,
                    self.__access,
                    {"mail": utils.u_str(avatarId), "blacklisted": False},
                ),
                lambda: None,
            )

        raise NotImplementedError()


################################################################################
@implementer(portal.IRealm)
class FDSNDataSelectAuthRealm:
    # ---------------------------------------------------------------------------
    def __init__(self, inv, bufferSize, access, userdb):
        self.__inv = inv
        self.__bufferSize = bufferSize
        self.__access = access
        self.__userdb = userdb

    # ---------------------------------------------------------------------------
    def requestAvatar(self, avatarId, _mind, *interfaces_):
        if resource.IResource in interfaces_:
            return (
                resource.IResource,
                FDSNDataSelect(
                    self.__inv,
                    self.__bufferSize,
                    self.__access,
                    self.__userdb.getAttributes(utils.u_str(avatarId)),
                ),
                lambda: None,
            )

        raise NotImplementedError()


################################################################################
class FDSNDataSelect(BaseResource):
    isLeaf = True

    # ---------------------------------------------------------------------------
    def __init__(self, inv, bufferSize, access=None, user=None):
        super().__init__(VERSION)

        self._rsURL = Application.Instance().recordStreamURL()
        self.__inv = inv
        self.__access = access
        self.__user = user
        self.__bufferSize = bufferSize

    # ---------------------------------------------------------------------------
    def render_OPTIONS(self, req):
        req.setHeader("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
        req.setHeader(
            "Access-Control-Allow-Headers",
            "Accept, Content-Type, X-Requested-With, Origin",
        )
        req.setHeader("Content-Type", "text/plain; charset=utf-8")
        return ""

    # ---------------------------------------------------------------------------
    def render_GET(self, req):
        # Parse and validate GET parameters
        ro = _DataSelectRequestOptions()
        ro.userName = self.__user and self.__user.get("mail")
        try:
            ro.parseGET(req.args)
            ro.parse()
            # the GET operation supports exactly one stream filter
            ro.streams.append(ro)
        except ValueError as e:
            logging.warning(str(e))
            return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

        return self._processRequest(req, ro)

    # ---------------------------------------------------------------------------
    def render_POST(self, req):
        # Parse and validate POST parameters
        ro = _DataSelectRequestOptions()
        ro.userName = self.__user and self.__user.get("mail")
        try:
            ro.parsePOST(req.content)
            ro.parse()
        except ValueError as e:
            logging.warning(str(e))
            return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

        return self._processRequest(req, ro)

    # ---------------------------------------------------------------------------
    def _networkIter(self, ro):
        for i in range(self.__inv.networkCount()):
            net = self.__inv.network(i)

            # network code
            if ro.channel and not ro.channel.matchNet(net.code()):
                continue

            # start and end time
            if ro.time:
                try:
                    end = net.end()
                except ValueError:
                    end = None
                if not ro.time.match(net.start(), end):
                    continue

            yield net

    # ---------------------------------------------------------------------------
    @staticmethod
    def _stationIter(net, ro):
        for i in range(net.stationCount()):
            sta = net.station(i)

            # station code
            if ro.channel and not ro.channel.matchSta(sta.code()):
                continue

            # start and end time
            if ro.time:
                try:
                    end = sta.end()
                except ValueError:
                    end = None
                if not ro.time.match(sta.start(), end):
                    continue

            yield sta

    # ---------------------------------------------------------------------------
    @staticmethod
    def _locationIter(sta, ro):
        for i in range(sta.sensorLocationCount()):
            loc = sta.sensorLocation(i)

            # location code
            if ro.channel and not ro.channel.matchLoc(loc.code()):
                continue

            # start and end time
            if ro.time:
                try:
                    end = loc.end()
                except ValueError:
                    end = None
                if not ro.time.match(loc.start(), end):
                    continue

            yield loc

    # ---------------------------------------------------------------------------
    @staticmethod
    def _streamIter(loc, ro):
        for i in range(loc.streamCount()):
            stream = loc.stream(i)

            # stream code
            if ro.channel and not ro.channel.matchCha(stream.code()):
                continue

            # start and end time
            if ro.time:
                try:
                    end = stream.end()
                except ValueError:
                    end = None
                if not ro.time.match(stream.start(), end):
                    continue

            yield stream, False

        for i in range(loc.auxStreamCount()):
            stream = loc.auxStream(i)

            # stream code
            if ro.channel and not ro.channel.matchCha(stream.code()):
                continue

            # start and end time
            if ro.time:
                try:
                    end = stream.end()
                except ValueError:
                    end = None
                if not ro.time.match(stream.start(), end):
                    continue

            yield stream, True

    # ---------------------------------------------------------------------------
    def _processRequest(self, req, ro):
        # pylint: disable=W0212

        if ro.quality not in ("B", "M"):
            msg = "quality other than 'B' or 'M' not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.minimumLength:
            msg = "enforcing of minimum record length not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.longestOnly:
            msg = "limitation to longest segment not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        app = Application.Instance()
        ro._checkTimes(app._realtimeGap)

        maxSamples = None
        if app._samplesM is not None:
            maxSamples = app._samplesM * 1000000
        samples = 0

        trackerList = []
        userIP = ""

        if app._trackdbEnabled or app._requestLog:
            xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
            if xff:
                userIP = xff[0].split(",")[0].strip()
            else:
                userIP = req.getClientIP()

            clientID = req.getHeader("User-Agent")
            if clientID:
                clientID = clientID[:80]
            else:
                clientID = "fdsnws"

        if app._trackdbEnabled:
            if ro.userName:
                userID = ro.userName
            else:
                userID = app._trackdbDefaultUser

            reqID = f"ws{str(int(round(time.time() * 1000) - 1420070400000))}"
            tracker = RequestTrackerDB(
                clientID,
                app.connection(),
                reqID,
                "WAVEFORM",
                userID,
                f"REQUEST WAVEFORM {reqID}",
                "fdsnws",
                userIP,
                req.getClientIP(),
            )

            trackerList.append(tracker)

        if app._requestLog:
            tracker = app._requestLog.tracker(ro.service, ro.userName, userIP, clientID)
            trackerList.append(tracker)

        # Open record stream
        rs = _MyRecordStream(self._rsURL, trackerList, self.__bufferSize)

        forbidden = None
        auxStreamsFound = False

        # Add request streams
        # iterate over inventory networks
        for s in ro.streams:
            for net in self._networkIter(s):
                netRestricted = utils.isRestricted(net)
                if not trackerList and netRestricted and not self.__user:
                    forbidden = forbidden or (forbidden is None)
                    continue

                for sta in self._stationIter(net, s):
                    staRestricted = utils.isRestricted(sta)
                    if not trackerList and staRestricted and not self.__user:
                        forbidden = forbidden or (forbidden is None)
                        continue

                    for loc in self._locationIter(sta, s):
                        for cha, aux in self._streamIter(loc, s):
                            start_time = max(cha.start(), s.time.start)

                            try:
                                end_time = min(cha.end(), s.time.end)
                            except ValueError:
                                end_time = s.time.end

                            streamRestricted = (
                                netRestricted
                                or staRestricted
                                or utils.isRestricted(cha)
                            )
                            if streamRestricted and (
                                not self.__user
                                or (
                                    self.__access
                                    and not self.__access.authorize(
                                        self.__user,
                                        net.code(),
                                        sta.code(),
                                        loc.code(),
                                        cha.code(),
                                        start_time,
                                        end_time,
                                    )
                                )
                            ):
                                for tracker in trackerList:
                                    net_class = (
                                        "t" if net.code()[0] in "0123456789XYZ" else "p"
                                    )
                                    tracker.line_status(
                                        start_time,
                                        end_time,
                                        net.code(),
                                        sta.code(),
                                        cha.code(),
                                        loc.code(),
                                        True,
                                        net_class,
                                        True,
                                        [],
                                        "fdsnws",
                                        "DENIED",
                                        0,
                                        "",
                                    )

                                forbidden = forbidden or (forbidden is None)
                                continue

                            forbidden = False

                            # aux streams are deprecated, mark aux streams as
                            # present to report warning later on, also do not
                            # count aux stream samples due to their loose
                            # binding to an aux device and source which only
                            # optionally contains a sampling rate
                            if aux:
                                auxStreamsFound = True
                            # enforce maximum sample per request restriction
                            elif maxSamples is not None:
                                try:
                                    n = cha.sampleRateNumerator()
                                    d = cha.sampleRateDenominator()
                                except ValueError:
                                    logging.warning(
                                        "skipping stream without sampling rate "
                                        f"definition: {net.code()}.{sta.code()}."
                                        f"{loc.code()}.{cha.code()}"
                                    )
                                    continue

                                # calculate number of samples for requested
                                # time window
                                diffSec = (end_time - start_time).length()
                                samples += int(diffSec * n / d)
                                if samples > maxSamples:
                                    msg = (
                                        f"maximum number of {app._samplesM}M samples "
                                        "exceeded"
                                    )
                                    return self.renderErrorPage(
                                        req, http.REQUEST_ENTITY_TOO_LARGE, msg, ro
                                    )

                            logging.debug(
                                f"adding stream: {net.code()}.{sta.code()}.{loc.code()}"
                                f".{cha.code()} {start_time.iso()} - {end_time.iso()}"
                            )
                            rs.addStream(
                                net.code(),
                                sta.code(),
                                loc.code(),
                                cha.code(),
                                start_time,
                                end_time,
                                utils.isRestricted(cha),
                                sta.archiveNetworkCode(),
                            )

        if forbidden:
            for tracker in trackerList:
                tracker.volume_status("fdsnws", "DENIED", 0, "")
                tracker.request_status("END", "")

            msg = "access denied"
            return self.renderErrorPage(req, http.FORBIDDEN, msg, ro)

        if forbidden is None:
            for tracker in trackerList:
                tracker.volume_status("fdsnws", "NODATA", 0, "")
                tracker.request_status("END", "")

            msg = "no metadata found"
            return self.renderErrorPage(req, http.NO_CONTENT, msg, ro)

        if auxStreamsFound:
            msg = (
                "the request contains at least one auxiliary stream which is "
                "deprecated"
            )
            if maxSamples is not None:
                msg += (
                    " and whose samples are not included in the maximum sample per "
                    "request limit"
                )
            logging.info(msg)

        # Build output filename
        fileName = (
            Application.Instance()._fileNamePrefix.replace(
                "%time", time.strftime("%Y-%m-%dT%H:%M:%S")
            )
            + ".mseed"
        )

        # Create producer for async IO
        prod = _WaveformProducer(req, ro, rs, fileName, trackerList)
        req.registerProducer(prod, True)
        prod.resumeProducing()

        # The request is handled by the deferred object
        return server.NOT_DONE_YET


# vim: ts=4 et
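
# [Editor's sketch, not part of the commit] Fetching miniSEED from the service
# implemented above via a standard fdsnws-dataselect query; host, port and the
# GE.APE stream identifiers are assumptions, not taken from the source:

import requests

params = {
    "network": "GE", "station": "APE", "location": "--", "channel": "BHZ",
    "starttime": "2020-01-01T00:00:00", "endtime": "2020-01-01T00:10:00",
}
resp = requests.get("http://localhost:8080/fdsnws/dataselect/1/query", params=params)
if resp.status_code == 200:
    with open("data.mseed", "wb") as f:
        f.write(resp.content)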
1020
lib/python/seiscomp/fdsnws/event.py
Normal file
File diff suppressed because it is too large
216
lib/python/seiscomp/fdsnws/fastsds.py
Normal file
@ -0,0 +1,216 @@
|
||||
################################################################################
|
||||
# Copyright (C) 2014-2017 by GFZ Potsdam
|
||||
#
|
||||
# Classes to access an SDS structure to be used by the Dataselect-WS
|
||||
#
|
||||
# Author: Javier Quinteros
|
||||
# Email: javier@gfz-potsdam.de
|
||||
################################################################################
|
||||
|
||||
import datetime
|
||||
import os
|
||||
|
||||
import seiscomp.logging
|
||||
import seiscomp.mseedlite
|
||||
|
||||
|
||||
class SDS:
|
||||
def __init__(self, sdsRoot):
|
||||
if isinstance(sdsRoot, list):
|
||||
self.sdsRoot = sdsRoot
|
||||
|
||||
else:
|
||||
self.sdsRoot = [sdsRoot]
|
||||
|
||||
def __getMSName(self, reqDate, net, sta, loc, cha):
|
||||
for root in self.sdsRoot:
|
||||
yield (
|
||||
f"{root}/{reqDate.year}/{net}/{sta}/{cha}.D/{net}.{sta}.{loc}.{cha}.D."
|
||||
f"{reqDate.year}.{reqDate.strftime('%j')}"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def __time2recno(msFile, reclen, timeStart, recStart, timeEnd, recEnd, searchTime):
|
||||
if searchTime <= timeStart:
|
||||
msFile.seek(recStart * reclen)
|
||||
rec = seiscomp.mseedlite.Record(msFile)
|
||||
return (recStart, rec.end_time)
|
||||
|
||||
if searchTime >= timeEnd:
|
||||
msFile.seek(recEnd * reclen)
|
||||
rec = seiscomp.mseedlite.Record(msFile)
|
||||
return (recEnd, rec.end_time)
|
||||
|
||||
t1 = timeStart
|
||||
r1 = recStart
|
||||
t2 = timeEnd
|
||||
r2 = recEnd
|
||||
rn = int(
|
||||
r1
|
||||
+ (r2 - r1) * (searchTime - t1).total_seconds() / (t2 - t1).total_seconds()
|
||||
)
|
||||
|
||||
rn = max(rn, recStart)
|
||||
rn = min(rn, recEnd)
|
||||
|
||||
while True:
|
||||
msFile.seek(rn * reclen)
|
||||
rec = seiscomp.mseedlite.Record(msFile)
|
||||
|
||||
if rec.begin_time < searchTime:
|
||||
r1 = rn
|
||||
t1 = rec.begin_time
|
||||
|
||||
if t1 == t2:
|
||||
break
|
||||
|
||||
rn = int(
|
||||
r1
|
||||
+ (r2 - r1)
|
||||
* (searchTime - t1).total_seconds()
|
||||
/ (t2 - t1).total_seconds()
|
||||
)
|
||||
|
||||
rn = max(rn, recStart)
|
||||
rn = min(rn, recEnd)
|
||||
|
||||
if rn == r1:
|
||||
break
|
||||
|
||||
else:
|
||||
r2 = rn
|
||||
t2 = rec.begin_time
|
||||
|
||||
if t1 == t2:
|
||||
break
|
||||
|
||||
rn = int(
|
||||
r2
|
||||
- (r2 - r1)
|
||||
* (t2 - searchTime).total_seconds()
|
||||
/ (t2 - t1).total_seconds()
|
||||
)
|
||||
|
||||
rn = max(rn, recStart)
|
||||
rn = min(rn, recEnd)
|
||||
|
||||
if rn == r2:
|
||||
break
|
||||
|
||||
return rn, rec.end_time
|
||||
|
||||
def __getWaveform(self, startt, endt, msFile, bufferSize):
|
||||
if startt >= endt:
|
||||
return
|
||||
|
||||
rec = seiscomp.mseedlite.Record(msFile)
|
||||
reclen = rec.size
|
||||
recStart = 0
|
||||
timeStart = rec.begin_time
|
||||
|
||||
if rec.begin_time >= endt:
|
||||
return
|
||||
|
||||
msFile.seek(-reclen, 2)
|
||||
rec = seiscomp.mseedlite.Record(msFile)
|
||||
recEnd = msFile.tell() // reclen - 1
|
||||
timeEnd = rec.begin_time
|
||||
|
||||
if rec.end_time <= startt:
|
||||
return
|
||||
|
||||
if timeStart >= timeEnd:
|
||||
seiscomp.logging.error(
|
||||
f"{msFile.name}: overlap detected (start={timeStart}, end={timeEnd})"
|
||||
)
|
||||
return
|
||||
|
||||
(lower, _) = self.__time2recno(
|
||||
msFile, reclen, timeStart, recStart, timeEnd, recEnd, startt
|
||||
)
|
||||
(upper, _) = self.__time2recno(
|
||||
msFile, reclen, startt, lower, timeEnd, recEnd, endt
|
||||
)
|
||||
|
||||
if upper < lower:
|
||||
seiscomp.logging.error(
|
||||
f"{msFile.name}: overlap detected (lower={lower}, upper={upper})"
|
||||
)
|
||||
upper = lower
|
||||
|
||||
msFile.seek(lower * reclen)
|
||||
remaining = (upper - lower + 1) * reclen
|
||||
check = True
|
||||
|
||||
if bufferSize % reclen:
|
||||
bufferSize += reclen - bufferSize % reclen
|
||||
|
||||
while remaining > 0:
|
||||
size = min(remaining, bufferSize)
|
||||
data = msFile.read(size)
|
||||
remaining -= size
|
||||
offset = 0
|
||||
|
||||
if not data:
|
||||
return
|
||||
|
||||
if check:
|
||||
while offset < len(data):
|
||||
rec = seiscomp.mseedlite.Record(data[offset : offset + reclen])
|
||||
|
||||
if rec.begin_time >= endt:
|
||||
return
|
||||
|
||||
if rec.end_time > startt:
|
||||
break
|
||||
|
||||
offset += reclen
|
||||
|
||||
check = False
|
||||
|
||||
if offset < len(data):
|
||||
yield data[offset:] if offset else data
|
||||
|
||||
while True:
|
||||
data = msFile.read(reclen)
|
||||
|
||||
if not data:
|
||||
return
|
||||
|
||||
rec = seiscomp.mseedlite.Record(data)
|
||||
|
||||
if rec.begin_time >= endt:
|
||||
return
|
||||
|
||||
yield data
|
||||
|
||||
def __getDayRaw(self, day, startt, endt, net, sta, loc, cha, bufferSize):
|
||||
# Take into account the case of empty location
|
||||
if loc == "--":
|
||||
loc = ""
|
||||
|
||||
for dataFile in self.__getMSName(day, net, sta, loc, cha):
|
||||
if not os.path.exists(dataFile):
|
||||
continue
|
||||
|
||||
try:
|
||||
with open(dataFile, "rb") as msFile:
|
||||
for buf in self.__getWaveform(startt, endt, msFile, bufferSize):
|
||||
yield buf
|
||||
|
||||
except seiscomp.mseedlite.MSeedError as e:
|
||||
seiscomp.logging.error(f"{dataFile}: {e}")
|
||||
|
||||
def getRawBytes(self, startt, endt, net, sta, loc, cha, bufferSize):
|
||||
day = datetime.datetime(
|
||||
startt.year, startt.month, startt.day
|
||||
) - datetime.timedelta(days=1)
|
||||
endDay = datetime.datetime(endt.year, endt.month, endt.day)
|
||||
|
||||
while day <= endDay:
|
||||
for buf in self.__getDayRaw(
|
||||
day, startt, endt, net, sta, loc, cha, bufferSize
|
||||
):
|
||||
yield buf
|
||||
|
||||
day += datetime.timedelta(days=1)
|
||||
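# Editor's note: __time2recno() above implements an interpolation search over a
# file of fixed-length miniSEED records sorted by start time: it guesses a
# record number proportionally between the known (time, record) bounds and
# narrows the bracket until it converges. A minimal standalone sketch of the
# same idea, assuming a sorted list of (begin_time, payload) tuples instead of
# a record file (all names here are illustrative, not part of this module):

from datetime import datetime


def interpolation_search(records, search_time):
    # return an index close to the first record starting at or after
    # search_time, clamped to the valid index range
    lo, hi = 0, len(records) - 1
    t_lo, t_hi = records[lo][0], records[hi][0]
    while lo < hi and t_lo < t_hi:
        # proportional guess, like the rn computation in __time2recno()
        guess = lo + int(
            (hi - lo)
            * (search_time - t_lo).total_seconds()
            / (t_hi - t_lo).total_seconds()
        )
        guess = min(max(guess, lo), hi)
        if records[guess][0] < search_time:
            if guess == lo:
                break
            lo, t_lo = guess, records[guess][0]
        else:
            if guess == hi:
                break
            hi, t_hi = guess, records[guess][0]
    return hi


# e.g. one record per minute; for uniformly spaced records the first guess
# already lands on the target:
# recs = [(datetime(2024, 1, 1, 0, m), None) for m in range(60)]
# interpolation_search(recs, datetime(2024, 1, 1, 0, 30))  # -> 30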
296
lib/python/seiscomp/fdsnws/http.py
Normal file
@ -0,0 +1,296 @@
################################################################################
# Copyright (C) 2013-2014 by gempa GmbH
#
# HTTP -- Utility methods which generate HTTP result strings
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################

from twisted.web import http, resource, server, static, util

import seiscomp.core
import seiscomp.logging

from .utils import accessLog, b_str, u_str, writeTSBin

VERSION = "1.2.5"

################################################################################


class HTTP:
    # ---------------------------------------------------------------------------
    @staticmethod
    def renderErrorPage(request, code, msg, version=VERSION, ro=None):
        resp = b"""\
Error %i: %s

%s

Usage details are available from %s

Request:
%s

Request Submitted:
%s

Service Version:
%s
"""

        noContent = code == http.NO_CONTENT

        # rewrite response code if requested and no data was found
        if noContent and ro is not None:
            code = ro.noData

        # set response code
        request.setResponseCode(code)

        # status code 204 requires no message body
        if code == http.NO_CONTENT:
            response = b""
        else:
            request.setHeader("Content-Type", "text/plain; charset=utf-8")

            reference = b"%s/" % request.path.rpartition(b"/")[0]

            codeStr = http.RESPONSES[code]
            date = b_str(seiscomp.core.Time.GMT().toString("%FT%T.%f"))
            response = resp % (
                code,
                codeStr,
                b_str(msg),
                reference,
                request.uri,
                date,
                b_str(version),
            )
            if not noContent:
                seiscomp.logging.warning(
                    f"responding with error: {code} ({u_str(codeStr)})"
                )

        accessLog(request, ro, code, len(response), msg)
        return response

    # ---------------------------------------------------------------------------
    @staticmethod
    def renderNotFound(request, version=VERSION):
        msg = "The requested resource does not exist on this server."
        return HTTP.renderErrorPage(request, http.NOT_FOUND, msg, version)

    # ---------------------------------------------------------------------------
    @staticmethod
    def renderNotModified(request, ro=None):
        code = http.NOT_MODIFIED
        request.setResponseCode(code)
        request.responseHeaders.removeHeader("Content-Type")
        accessLog(request, ro, code, 0, None)
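# Editor's note: a minimal sketch of how a twisted.web resource can delegate
# error handling to HTTP.renderErrorPage() above; the resource class and the
# message are illustrative only, not part of this module:

class _ExampleResource(resource.Resource):
    isLeaf = True

    def render_GET(self, request):
        # answer with the standard FDSNWS plain-text error layout
        return HTTP.renderErrorPage(request, http.BAD_REQUEST, "missing parameter")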
################################################################################
class ServiceVersion(resource.Resource):
    isLeaf = True

    # ---------------------------------------------------------------------------
    def __init__(self, version):
        super().__init__()

        self.version = version
        self.type = "text/plain"

    # ---------------------------------------------------------------------------
    def render(self, request):
        request.setHeader("Content-Type", "text/plain; charset=utf-8")
        return b_str(self.version)


################################################################################
class WADLFilter(static.Data):
    # ---------------------------------------------------------------------------
    def __init__(self, path, paramNameFilterList):
        data = ""
        removeParam = False
        with open(path, "r", encoding="utf-8") as fp:
            for line in fp:
                lineStripped = line.strip().replace(" ", "")
                if removeParam:
                    if "</param>" in lineStripped:
                        removeParam = False
                    continue

                valid = True
                if "<param" in lineStripped:
                    for f in paramNameFilterList:
                        if f'name="{f}"' in lineStripped:
                            valid = False
                            if lineStripped[-2:] != "/>":
                                removeParam = True
                            break

                if valid:
                    data += line

        super().__init__(b_str(data), "application/xml; charset=utf-8")


################################################################################
class BaseResource(resource.Resource):
    # ---------------------------------------------------------------------------
    def __init__(self, version=VERSION):
        super().__init__()

        self.version = version

    # ---------------------------------------------------------------------------
    def renderErrorPage(self, request, code, msg, ro=None):
        return HTTP.renderErrorPage(request, code, msg, self.version, ro)

    # ---------------------------------------------------------------------------
    def writeErrorPage(self, request, code, msg, ro=None):
        data = self.renderErrorPage(request, code, msg, ro)
        if data:
            writeTSBin(request, data)

    # ---------------------------------------------------------------------------
    def returnNotModified(self, request, ro=None):
        HTTP.renderNotModified(request, ro)

    # ---------------------------------------------------------------------------
    # Renders an error page if the result set exceeds the configured maximum
    # number of objects
    def checkObjects(self, request, objCount, maxObj):
        if objCount <= maxObj:
            return True

        msg = (
            "The result set of your request exceeds the configured maximum "
            f"number of objects ({maxObj}). Refine your request parameters."
        )
        self.writeErrorPage(request, http.REQUEST_ENTITY_TOO_LARGE, msg)
        return False


################################################################################
class NoResource(BaseResource):
    isLeaf = True

    # ---------------------------------------------------------------------------
    def render(self, request):
        return HTTP.renderNotFound(request, self.version)

    # ---------------------------------------------------------------------------
    def getChild(self, _path, _request):
        return self


################################################################################
class ListingResource(BaseResource):
    html = """<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="author" content="gempa GmbH">
<title>SeisComP FDSNWS Implementation</title>
</head>
<body>
<p><a href="../">Parent Directory</a></p>
<h1>SeisComP FDSNWS Web Service</h1>
<p>Index of %s</p>
<ul>
%s
</ul>
</body>"""

    # ---------------------------------------------------------------------------
    def render(self, request):
        lis = ""
        if request.path[-1:] != b"/":
            return util.redirectTo(request.path + b"/", request)

        for k, v in self.children.items():
            if v.isLeaf:
                continue
            if hasattr(v, "hideInListing") and v.hideInListing:
                continue
            name = u_str(k)
            lis += f'<li><a href="{name}/">{name}/</a></li>\n'

        return b_str(ListingResource.html % (u_str(request.path), lis))

    # ---------------------------------------------------------------------------
    def getChild(self, path, _request):
        if not path:
            return self

        return NoResource(self.version)


################################################################################
class DirectoryResource(static.File):
    # ---------------------------------------------------------------------------
    def __init__(self, fileName, version=VERSION):
        super().__init__(fileName)

        self.version = version
        self.childNotFound = NoResource(self.version)

    # ---------------------------------------------------------------------------
    def render(self, request):
        if request.path[-1:] != b"/":
            return util.redirectTo(request.path + b"/", request)

        return static.File.render(self, request)

    # ---------------------------------------------------------------------------
    def getChild(self, path, _request):
        if not path:
            return self

        return NoResource(self.version)


################################################################################
class Site(server.Site):
    def __init__(self, res, corsOrigins):
        super().__init__(res)

        self._corsOrigins = corsOrigins

    # ---------------------------------------------------------------------------
    def getResourceFor(self, request):
        seiscomp.logging.debug(
            f"request ({request.getClientIP()}): {u_str(request.uri)}"
        )
        request.setHeader("Server", f"SeisComP-FDSNWS/{VERSION}")
        request.setHeader("Access-Control-Allow-Headers", "Authorization")
        request.setHeader("Access-Control-Expose-Headers", "WWW-Authenticate")

        self.setAllowOrigin(request)

        return server.Site.getResourceFor(self, request)

    # ---------------------------------------------------------------------------
    def setAllowOrigin(self, req):
        # no allowed origin: no response header
        lenOrigins = len(self._corsOrigins)
        if lenOrigins == 0:
            return

        # one origin: add header
        if lenOrigins == 1:
            req.setHeader("Access-Control-Allow-Origin", self._corsOrigins[0])
            return

        # more than one origin: check current origin against allowed origins
        # and return the current origin on match
        origin = req.getHeader("Origin")
        if origin in self._corsOrigins:
            req.setHeader("Access-Control-Allow-Origin", origin)

        # Set the Vary header to let the browser know that the response depends
        # on the request. Certain cache strategies should be disabled.
        req.setHeader("Vary", "Origin")
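# Editor's note: a minimal sketch wiring the classes above together; the port
# and origin list are illustrative. With more than one configured origin,
# setAllowOrigin() echoes the request's Origin header back only on a match:

if __name__ == "__main__":
    from twisted.internet import reactor

    root = ListingResource()
    site = Site(root, ["https://example.org", "https://example.net"])
    reactor.listenTCP(8080, site)
    reactor.run()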
101
lib/python/seiscomp/fdsnws/log.py
Normal file
@ -0,0 +1,101 @@
################################################################################
# Copyright (C) 2013-2014 gempa GmbH
#
# Thread-safe file logger
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################

import os
import sys
import time
import threading

from queue import Queue

# -------------------------------------------------------------------------------


def _worker(log):
    while True:
        # pylint: disable=W0212
        msg = log._queue.get()
        log._write(str(msg))
        log._queue.task_done()


################################################################################
class Log:
    # ---------------------------------------------------------------------------
    def __init__(self, filePath, archiveSize=7):
        self._filePath = filePath
        self._basePath = os.path.dirname(filePath)
        self._fileName = os.path.basename(filePath)
        self._archiveSize = archiveSize
        self._queue = Queue()
        self._lastLogTime = None
        self._fd = None

        self._archiveSize = max(self._archiveSize, 0)

        # worker thread, responsible for writing messages to file
        t = threading.Thread(target=_worker, args=(self,))
        t.daemon = True
        t.start()

    # ---------------------------------------------------------------------------
    def __del__(self):
        # wait for worker thread to write all pending log messages
        self._queue.join()

        if self._fd is not None:
            self._fd.close()

    # ---------------------------------------------------------------------------
    def log(self, msg):
        self._queue.put(msg)

    # ---------------------------------------------------------------------------
    def _rotate(self):
        self._fd.close()
        self._fd = None

        try:
            pattern = f"{self._filePath}.%i"
            for i in range(self._archiveSize, 1, -1):
                src = pattern % (i - 1)
                if os.path.isfile(src):
                    os.rename(pattern % (i - 1), pattern % i)
            os.rename(self._filePath, pattern % 1)
        except Exception as e:
            print(f"failed to rotate access log: {e}", file=sys.stderr)

        self._fd = open(self._filePath, "w", encoding="utf-8")

    # ---------------------------------------------------------------------------
    def _write(self, msg):
        try:
            now = time.localtime()
            if self._fd is None:
                if self._basePath and not os.path.exists(self._basePath):
                    os.makedirs(self._basePath)
                self._fd = open(self._filePath, "a", encoding="utf-8")
            elif (
                self._archiveSize > 0
                and self._lastLogTime is not None
                and (
                    self._lastLogTime.tm_yday != now.tm_yday
                    or self._lastLogTime.tm_year != now.tm_year
                )
            ):
                self._rotate()

            print(msg, file=self._fd)
            self._fd.flush()
            self._lastLogTime = now
        except Exception as e:
            print(f"access log: {e}", file=sys.stderr)


# vim: ts=4 et
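# Editor's note: a minimal usage sketch of the Log class above; the path is
# illustrative. log() only enqueues the message, the daemon worker thread
# performs the actual file I/O and the day-based rotation:

if __name__ == "__main__":
    access = Log("/tmp/fdsnws-access.log", archiveSize=7)
    access.log("GET /fdsnws/station/1/version 200")
    access._queue.join()  # demo only: block until the worker has written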
138
lib/python/seiscomp/fdsnws/reqlog.py
Normal file
@ -0,0 +1,138 @@
import os
import datetime
import json
import hashlib
import subprocess
import logging
import logging.handlers
import threading


from .utils import b_str

mutex = threading.Lock()


class MyFileHandler(logging.handlers.TimedRotatingFileHandler):
    def __init__(self, filename):
        super().__init__(filename, when="midnight", utc=True)

    def rotate(self, source, dest):
        super().rotate(source, dest)

        if os.path.exists(dest):
            subprocess.Popen(["bzip2", dest])


class Tracker:
    def __init__(self, logger, geoip, service, userName, userIP, clientID, userSalt):
        self.__logger = logger
        self.__userName = userName
        self.__userSalt = userSalt
        self.__logged = False

        if userName:
            userID = int(
                hashlib.md5(b_str(userSalt + userName.lower())).hexdigest()[:8], 16
            )
        else:
            userID = int(hashlib.md5(b_str(userSalt + userIP)).hexdigest()[:8], 16)

        self.__data = {
            "service": service,
            "userID": userID,
            "clientID": clientID,
            "userEmail": None,
            "auth": bool(userName),
            "userLocation": {},
            "created": f"{datetime.datetime.utcnow().isoformat()}Z",
        }

        if geoip:
            self.__data["userLocation"]["country"] = geoip.country_code_by_addr(userIP)

        if (
            userName and userName.lower().endswith("@gfz-potsdam.de")
        ) or userIP.startswith("139.17."):
            self.__data["userLocation"]["institution"] = "GFZ"

    # pylint: disable=W0613
    def line_status(
        self,
        start_time,
        end_time,
        network,
        station,
        channel,
        location,
        restricted,
        net_class,
        shared,
        constraints,
        volume,
        status,
        size,
        message,
    ):
        try:
            trace = self.__data["trace"]

        except KeyError:
            trace = []
            self.__data["trace"] = trace

        trace.append(
            {
                "net": network,
                "sta": station,
                "loc": location,
                "cha": channel,
                "start": start_time.iso(),
                "end": end_time.iso(),
                "restricted": restricted,
                "status": status,
                "bytes": size,
            }
        )

        if restricted and status == "OK":
            self.__data["userEmail"] = self.__userName

    # FDSNWS requests have one volume, so volume_status() is called once per request
    def volume_status(self, volume, status, size, message):
        self.__data["status"] = status
        self.__data["bytes"] = size
        self.__data["finished"] = f"{datetime.datetime.utcnow().isoformat()}Z"

    def request_status(self, status, message):
        with mutex:
            if not self.__logged:
                self.__logger.info(json.dumps(self.__data))
                self.__logged = True


class RequestLog:
    def __init__(self, filename, userSalt):
        self.__logger = logging.getLogger("seiscomp.fdsnws.reqlog")
        self.__logger.addHandler(MyFileHandler(filename))
        self.__logger.setLevel(logging.INFO)
        self.__userSalt = userSalt

        try:
            import GeoIP

            self.__geoip = GeoIP.new(GeoIP.GEOIP_MEMORY_CACHE)

        except ImportError:
            self.__geoip = None

    def tracker(self, service, userName, userIP, clientID):
        return Tracker(
            self.__logger,
            self.__geoip,
            service,
            userName,
            userIP,
            clientID,
            self.__userSalt,
        )
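# Editor's note: a minimal usage sketch of RequestLog above; the path, salt,
# status strings and request metadata are illustrative. Tracker buffers
# per-request data and emits exactly one JSON line once request_status() is
# called:

if __name__ == "__main__":
    reqlog = RequestLog("/tmp/fdsnws-req.log", userSalt="s3cr3t")
    t = reqlog.tracker("fdsnws-dataselect", "", "192.0.2.1", "demo-client")
    t.volume_status("fdsnws", "OK", 4096, None)
    t.request_status("END", "")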
179
lib/python/seiscomp/fdsnws/reqtrack.py
Normal file
@ -0,0 +1,179 @@
from twisted.internet import reactor
import seiscomp.core
import seiscomp.datamodel


def callFromThread(f):
    def wrap(*args, **kwargs):
        reactor.callFromThread(f, *args, **kwargs)

    return wrap


def enableNotifier(f):
    def wrap(*args, **kwargs):
        saveState = seiscomp.datamodel.Notifier.IsEnabled()
        seiscomp.datamodel.Notifier.SetEnabled(True)
        f(*args, **kwargs)
        seiscomp.datamodel.Notifier.SetEnabled(saveState)

    return wrap


class RequestTrackerDB(object):
    def __init__(
        self,
        appName,
        msgConn,
        req_id,
        req_type,
        user,
        header,
        label,
        user_ip,
        client_ip,
    ):
        self.msgConn = msgConn
        self.arclinkRequest = seiscomp.datamodel.ArclinkRequest.Create()
        self.arclinkRequest.setCreated(seiscomp.core.Time.GMT())
        self.arclinkRequest.setRequestID(req_id)
        self.arclinkRequest.setUserID(str(user))
        self.arclinkRequest.setClientID(appName)
        if user_ip:
            self.arclinkRequest.setUserIP(user_ip)
        if client_ip:
            self.arclinkRequest.setClientIP(client_ip)
        self.arclinkRequest.setType(req_type)
        self.arclinkRequest.setLabel(label)
        self.arclinkRequest.setHeader(header)

        self.averageTimeWindow = seiscomp.core.TimeSpan(0.0)
        self.totalLineCount = 0
        self.okLineCount = 0

        self.requestLines = []
        self.statusLines = []

    def send(self):
        msg = seiscomp.datamodel.Notifier.GetMessage(True)
        if msg:
            self.msgConn.send("LOGGING", msg)

    def line_status(
        self,
        start_time,
        end_time,
        network,
        station,
        channel,
        location,
        restricted,
        net_class,
        shared,
        constraints,
        volume,
        status,
        size,
        message,
    ):
        if network is None or network == "":
            network = "."
        if station is None or station == "":
            station = "."
        if channel is None or channel == "":
            channel = "."
        if location is None or location == "":
            location = "."
        if volume is None:
            volume = "NODATA"
        if size is None:
            size = 0
        if message is None:
            message = ""

        if isinstance(constraints, list):
            constr = " ".join(constraints)
        else:
            constr = " ".join([f"{a}={b}" for (a, b) in constraints.items()])

        arclinkRequestLine = seiscomp.datamodel.ArclinkRequestLine()
        arclinkRequestLine.setStart(start_time)
        arclinkRequestLine.setEnd(end_time)
        arclinkRequestLine.setStreamID(
            seiscomp.datamodel.WaveformStreamID(
                network[:8], station[:8], location[:8], channel[:8], ""
            )
        )
        arclinkRequestLine.setConstraints(constr)
        if isinstance(restricted, bool):
            arclinkRequestLine.setRestricted(restricted)
        arclinkRequestLine.setNetClass(net_class)
        if isinstance(shared, bool):
            arclinkRequestLine.setShared(shared)
        #
        arclinkStatusLine = seiscomp.datamodel.ArclinkStatusLine()
        arclinkStatusLine.setVolumeID(volume)
        arclinkStatusLine.setStatus(status)
        arclinkStatusLine.setSize(size)
        arclinkStatusLine.setMessage(message)
        #
        arclinkRequestLine.setStatus(arclinkStatusLine)
        self.requestLines.append(arclinkRequestLine)

        self.averageTimeWindow += end_time - start_time
        self.totalLineCount += 1
        if status == "OK":
            self.okLineCount += 1

    def volume_status(self, volume, status, size, message):
        if volume is None:
            volume = "NODATA"
        if size is None:
            size = 0
        if message is None:
            message = ""

        arclinkStatusLine = seiscomp.datamodel.ArclinkStatusLine()
        arclinkStatusLine.setVolumeID(volume)
        arclinkStatusLine.setStatus(status)
        arclinkStatusLine.setSize(size)
        arclinkStatusLine.setMessage(message)
        self.statusLines.append(arclinkStatusLine)

    @callFromThread
    @enableNotifier
    def request_status(self, status, message):
        if message is None:
            message = ""

        self.arclinkRequest.setStatus(status)
        self.arclinkRequest.setMessage(message)

        ars = seiscomp.datamodel.ArclinkRequestSummary()
        tw = self.averageTimeWindow.seconds()
        if self.totalLineCount > 0:
            # average request time window
            tw = self.averageTimeWindow.seconds() // self.totalLineCount
        if tw >= 2**31:
            tw = -1  # prevent 32bit int overflow
        ars.setAverageTimeWindow(tw)
        ars.setTotalLineCount(self.totalLineCount)
        ars.setOkLineCount(self.okLineCount)
        self.arclinkRequest.setSummary(ars)

        al = seiscomp.datamodel.ArclinkLog()
        al.add(self.arclinkRequest)

        for obj in self.requestLines:
            self.arclinkRequest.add(obj)

        for obj in self.statusLines:
            self.arclinkRequest.add(obj)

        self.send()

    def __verseed_errors(self, volume):
        pass

    def verseed(self, volume, file):
        pass
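# Editor's note: request_status() above is invoked from worker threads; the
# stacked decorators hop the call onto the reactor thread and enable Notifier
# recording around it. The same wrapping pattern, reduced to a standalone
# sketch (names are illustrative; unlike enableNotifier above, this variant
# restores the flag with try/finally so an exception cannot leave it set):

def with_flag(flag_holder):
    def decorator(f):
        def wrap(*args, **kwargs):
            saved = flag_holder["enabled"]
            flag_holder["enabled"] = True
            try:
                f(*args, **kwargs)
            finally:
                flag_holder["enabled"] = saved

        return wrap

    return decorator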
609
lib/python/seiscomp/fdsnws/request.py
Normal file
@ -0,0 +1,609 @@
################################################################################
# Copyright (C) 2013-2014 gempa GmbH
#
# RequestOptions -- HTTP GET request parameters
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################

import fnmatch
import math
import re

from twisted.web import http

from seiscomp.core import Time
import seiscomp.logging
import seiscomp.math

from .utils import u_str


class RequestOptions:
    # the match() method matches only patterns at the beginning of a string;
    # since we have to ensure that no invalid character is present we use the
    # search() method in combination with a negated pattern instead
    FloatChars = re.compile(r"[^-0-9.]").search
    ChannelChars = re.compile(r"[^A-Za-z0-9*?]").search
    ChannelExtChars = re.compile(r"[^A-Za-z0-9*?+\-_]").search
    BooleanTrueValues = ["1", "true", "t", "yes", "y"]
    BooleanFalseValues = ["0", "false", "f", "no", "n"]
    OutputFormats = []  # override in derived classes

    PStart = ["starttime", "start"]
    PEnd = ["endtime", "end"]
    PStartBefore = ["startbefore"]
    PStartAfter = ["startafter"]
    PEndBefore = ["endbefore"]
    PEndAfter = ["endafter"]
    SimpleTimeParams = PStart + PEnd
    WindowTimeParams = PStartBefore + PStartAfter + PEndBefore + PEndAfter
    TimeParams = SimpleTimeParams + WindowTimeParams

    PNet = ["network", "net"]
    PSta = ["station", "sta"]
    PLoc = ["location", "loc"]
    PCha = ["channel", "cha"]
    StreamParams = PNet + PSta + PLoc + PCha

    PMinLat = ["minlatitude", "minlat"]
    PMaxLat = ["maxlatitude", "maxlat"]
    PMinLon = ["minlongitude", "minlon"]
    PMaxLon = ["maxlongitude", "maxlon"]
    PLat = ["latitude", "lat"]
    PLon = ["longitude", "lon"]
    PMinRadius = ["minradius"]
    PMaxRadius = ["maxradius"]
    GeoRectParams = PMinLat + PMaxLat + PMinLon + PMaxLon
    GeoCircleParams = PLat + PLon + PMinRadius + PMaxRadius
    GeoParams = GeoRectParams + GeoCircleParams

    PFormat = ["format"]
    PNoData = ["nodata"]
    OutputParams = PFormat + PNoData

    POSTParams = OutputParams
    GETParams = StreamParams + SimpleTimeParams

    # ---------------------------------------------------------------------------
    class Channel:
        def __init__(self):
            self.net = None
            self.sta = None
            self.loc = None
            self.cha = None

        def matchNet(self, value):
            return self.match(value, self.net)

        def matchSta(self, value):
            return self.match(value, self.sta)

        def matchLoc(self, value):
            return self.match(value, self.loc, True)

        def matchCha(self, value):
            return self.match(value, self.cha)

        @staticmethod
        def match(value, globList, testEmpty=False):
            if not globList:
                return True

            for glob in globList:
                if testEmpty and value == "" and glob == "--":
                    return True
                if fnmatch.fnmatchcase(value, glob):
                    return True

            return False

    # ---------------------------------------------------------------------------
    class Time:
        def __init__(self):
            self.simpleTime = True
            self.start = None
            self.end = None
            # window time only
            self.startBefore = None
            self.startAfter = None
            self.endBefore = None
            self.endAfter = None

        # used by FDSN Station and DataSelect
        def match(self, start, end=None):
            # simple time: limit to epochs intersecting with the specified time
            # range
            res = (self.start is None or end is None or end >= self.start) and (
                self.end is None or start <= self.end
            )

            # window time: limit to epochs strictly starting or ending before or
            # after a specified time value
            if not self.simpleTime:
                res = (
                    res
                    and (
                        self.startBefore is None
                        or (start is not None and start < self.startBefore)
                    )
                    and (
                        self.startAfter is None
                        or (start is not None and start > self.startAfter)
                    )
                    and (
                        self.endBefore is None
                        or (end is not None and end < self.endBefore)
                    )
                    and (self.endAfter is None or end is None or end > self.endAfter)
                )

            return res

    # ---------------------------------------------------------------------------
    class Geo:
        # -----------------------------------------------------------------------
        class BBox:
            def __init__(self):
                self.minLat = None
                self.maxLat = None
                self.minLon = None
                self.maxLon = None

            def dateLineCrossing(self):
                return self.minLon and self.maxLon and self.minLon > self.maxLon

        # -----------------------------------------------------------------------
        class BCircle:
            def __init__(self):
                self.lat = None
                self.lon = None
                self.minRad = None
                self.maxRad = None

            # -------------------------------------------------------------------
            # Calculates the outer bounding box
            def calculateBBox(self):
                def rad(degree):
                    return math.radians(degree)

                def deg(radians):
                    return math.degrees(radians)

                b = RequestOptions.Geo.BBox()
                if self.maxRad is None or self.maxRad >= 180:
                    return b

                b.minLat = self.lat - self.maxRad
                b.maxLat = self.lat + self.maxRad
                if b.minLat > -90 and b.maxLat < 90:
                    dLon = deg(
                        math.asin(math.sin(rad(self.maxRad)) / math.cos(rad(self.lat)))
                    )
                    b.minLon = self.lon - dLon
                    if b.minLon < -180:
                        b.minLon += 360
                    b.maxLon = self.lon + dLon
                    if b.maxLon > 180:
                        b.maxLon -= 360
                else:
                    # pole within distance: one latitude restriction remains,
                    # no longitude restriction
                    if b.minLat <= -90:
                        b.minLat = None
                    else:
                        b.maxLat = None
                    b.minLon = None
                    b.maxLon = None

                return b

        # -----------------------------------------------------------------------
        def __init__(self):
            self.bBox = None
            self.bCircle = None

        # -----------------------------------------------------------------------
        def match(self, lat, lon):
            if self.bBox is not None:
                b = self.bBox
                if b.minLat is not None and lat < b.minLat:
                    return False
                if b.maxLat is not None and lat > b.maxLat:
                    return False
                # date line crossing if minLon > maxLon
                if b.dateLineCrossing():
                    return lon >= b.minLon or lon <= b.maxLon
                if b.minLon is not None and lon < b.minLon:
                    return False
                if b.maxLon is not None and lon > b.maxLon:
                    return False
                return True

            if self.bCircle:
                c = self.bCircle
                dist = seiscomp.math.delazi(c.lat, c.lon, lat, lon)
                if c.minRad is not None and dist[0] < c.minRad:
                    return False
                if c.maxRad is not None and dist[0] > c.maxRad:
                    return False
                return True

            return False

    # ---------------------------------------------------------------------------
    def __init__(self):
        self.service = ""
        self.accessTime = Time.GMT()
        self.userName = None

        self.time = None
        self.channel = None
        self.geo = None

        self.noData = http.NO_CONTENT
        self.format = None

        self._args = {}
        self.streams = []  # 1 entry for GET, multiple entries for POST

    # ---------------------------------------------------------------------------
    def parseOutput(self):
        # nodata
        code = self.parseInt(self.PNoData)
        if code is not None:
            if code not in (http.NO_CONTENT, http.NOT_FOUND):
                self.raiseValueError(self.PNoData[0])
            self.noData = code

        # format
        key, value = self.getFirstValue(self.PFormat)
        if value is None:
            # no format specified: default to first in list if available
            if len(self.OutputFormats) > 0:
                self.format = self.OutputFormats[0]
        else:
            value = value.lower()
            if value in self.OutputFormats:
                self.format = value
            else:
                self.raiseValueError(key)

    # ---------------------------------------------------------------------------
    def parseChannel(self):
        c = RequestOptions.Channel()

        c.net = self.parseChannelChars(self.PNet, False, True)
        c.sta = self.parseChannelChars(self.PSta)
        c.loc = self.parseChannelChars(self.PLoc, True)
        c.cha = self.parseChannelChars(self.PCha)

        if c.net or c.sta or c.loc or c.cha:
            self.channel = c

    # ---------------------------------------------------------------------------
    def parseTime(self, parseWindowTime=False):
        t = RequestOptions.Time()

        # start[time], end[time]
        t.start = self.parseTimeStr(self.PStart)
        t.end = self.parseTimeStr(self.PEnd)

        simpleTime = t.start is not None or t.end is not None

        # [start,end][before,after]
        if parseWindowTime:
            t.startBefore = self.parseTimeStr(self.PStartBefore)
            t.startAfter = self.parseTimeStr(self.PStartAfter)
            t.endBefore = self.parseTimeStr(self.PEndBefore)
            t.endAfter = self.parseTimeStr(self.PEndAfter)

            windowTime = (
                t.startBefore is not None
                or t.startAfter is not None
                or t.endBefore is not None
                or t.endAfter is not None
            )
            if simpleTime or windowTime:
                self.time = t
                self.time.simpleTime = not windowTime

        elif simpleTime:
            self.time = t
            self.time.simpleTime = True

    # ---------------------------------------------------------------------------
    def parseGeo(self):
        # bounding box (optional)
        b = RequestOptions.Geo.BBox()
        b.minLat = self.parseFloat(self.PMinLat, -90, 90)
        b.maxLat = self.parseFloat(self.PMaxLat, -90, 90)
        if b.minLat is not None and b.maxLat is not None and b.minLat > b.maxLat:
            raise ValueError(f"{self.PMinLat[0]} exceeds {self.PMaxLat[0]}")

        b.minLon = self.parseFloat(self.PMinLon, -180, 180)
        b.maxLon = self.parseFloat(self.PMaxLon, -180, 180)
        # maxLon < minLon -> date line crossing

        hasBBoxParam = (
            b.minLat is not None
            or b.maxLat is not None
            or b.minLon is not None
            or b.maxLon is not None
        )

        # bounding circle (optional)
        c = RequestOptions.Geo.BCircle()
        c.lat = self.parseFloat(self.PLat, -90, 90)
        c.lon = self.parseFloat(self.PLon, -180, 180)
        c.minRad = self.parseFloat(self.PMinRadius, 0, 180)
        c.maxRad = self.parseFloat(self.PMaxRadius, 0, 180)
        if c.minRad is not None and c.maxRad is not None and c.minRad > c.maxRad:
            raise ValueError(f"{self.PMinRadius[0]} exceeds {self.PMaxRadius[0]}")

        hasBCircleRadParam = c.minRad is not None or c.maxRad is not None
        hasBCircleParam = c.lat is not None or c.lon is not None or hasBCircleRadParam

        # bounding box and bounding circle may not be combined
        if hasBBoxParam and hasBCircleParam:
            raise ValueError(
                "bounding box and bounding circle parameters may not be combined"
            )
        if hasBBoxParam:
            self.geo = RequestOptions.Geo()
            self.geo.bBox = b
        elif hasBCircleRadParam:
            self.geo = RequestOptions.Geo()
            if c.lat is None:
                c.lat = 0.0
            if c.lon is None:
                c.lon = 0.0
            self.geo.bCircle = c

    # ---------------------------------------------------------------------------
    @staticmethod
    def _assertValueRange(key, v, minValue, maxValue):
        if (minValue is not None and v < minValue) or (
            maxValue is not None and v > maxValue
        ):
            minStr, maxStr = "-inf", "inf"
            if minValue is not None:
                minStr = str(minValue)
            if maxValue is not None:
                maxStr = str(maxValue)
            raise ValueError(f"parameter not in domain [{minStr},{maxStr}]: {key}")

    # ---------------------------------------------------------------------------
    @staticmethod
    def raiseValueError(key):
        raise ValueError(f"invalid value in parameter: {key}")

    # ---------------------------------------------------------------------------
    def getFirstValue(self, keys):
        for key in keys:
            if key in self._args:
                return key, self._args[key][0].strip()

        return None, None

    # ---------------------------------------------------------------------------
    def getValues(self, keys):
        v = []
        for key in keys:
            if key in self._args:
                v += self._args[key]
        return v

    # ---------------------------------------------------------------------------
    def getListValues(self, keys, lower=False):
        values = set()
        for key in keys:
            if key not in self._args:
                continue

            for vList in self._args[key]:
                for v in vList.split(","):
                    if v is None:
                        continue
                    v = v.strip()
                    if lower:
                        v = v.lower()
                    values.add(v)

        return values

    # ---------------------------------------------------------------------------
    def parseInt(self, keys, minValue=None, maxValue=None):
        key, value = self.getFirstValue(keys)

        if value is None:
            return None

        try:
            i = int(value)
        except ValueError as e:
            raise ValueError(f"invalid integer value in parameter: {key}") from e

        self._assertValueRange(key, i, minValue, maxValue)
        return i

    # ---------------------------------------------------------------------------
    def parseFloat(self, keys, minValue=None, maxValue=None):
        key, value = self.getFirstValue(keys)

        if value is None:
            return None

        if self.FloatChars(value):
            raise ValueError(
                f"invalid characters in float parameter: {key} (scientific notation "
                "forbidden by spec)"
            )

        try:
            f = float(value)
        except ValueError as e:
            raise ValueError(f"invalid float value in parameter: {key}") from e

        self._assertValueRange(key, f, minValue, maxValue)
        return f

    # ---------------------------------------------------------------------------
    def parseBool(self, keys):
        key, value = self.getFirstValue(keys)

        if value is None:
            return None

        value = value.lower()
        if value in self.BooleanTrueValues:
            return True
        if value in self.BooleanFalseValues:
            return False

        raise ValueError(f"invalid boolean value in parameter: {key}")

    # ---------------------------------------------------------------------------
    def parseTimeStr(self, keys):
        key, value = self.getFirstValue(keys)

        if value is None:
            return None

        time = Time.FromString(value)
        # use an explicit test for None here since the bool value of the epoch
        # date (1970-01-01) is False
        if time is None:
            raise ValueError(f"invalid date format in parameter: {key}")

        return time

    # ---------------------------------------------------------------------------
    def parseChannelChars(self, keys, allowEmpty=False, useExtChars=False):
        # channel parameters may be specified as a comma separated list and may
        # be repeated several times
        values = None
        for vList in self.getValues(keys):
            if values is None:
                values = []
            for v in vList.split(","):
                v = v.strip()
                if allowEmpty and (v == "--" or len(v) == 0):
                    values.append("--")
                    continue

                if (useExtChars and self.ChannelExtChars(v)) or (
                    not useExtChars and self.ChannelChars(v)
                ):
                    raise ValueError(f"invalid characters in parameter: {keys[0]}")
                values.append(v)

        return values

    # ---------------------------------------------------------------------------
    def parseGET(self, args):
        # transform keys to lower case
        if args is not None:
            for k, v in args.items():
                k = u_str(k.lower())
                if k not in self.GETParams:
                    raise ValueError(f"invalid param: {k}")

                self._args[k] = [u_str(x) for x in v]

    # ---------------------------------------------------------------------------
    def parsePOST(self, content):
        nLine = 0

        for line in content:
            nLine += 1
            line = u_str(line.strip())

            # ignore empty and comment lines
            if len(line) == 0 or line[0] == "#":
                continue

            # collect parameters (non stream lines)
            toks = line.split("=", 1)
            if len(toks) > 1:
                key = toks[0].strip().lower()

                isPOSTParam = False
                for p in self.POSTParams:
                    if p == key:
                        if key not in self._args:
                            self._args[key] = []
                        self._args[key].append(toks[1].strip())
                        isPOSTParam = True
                        break

                if isPOSTParam:
                    continue

                # time parameters not allowed in POST header
                for p in self.TimeParams:
                    if p == key:
                        raise ValueError(
                            f"time parameter in line {nLine} not allowed in POST "
                            "request"
                        )

                # stream parameters not allowed in POST header
                for p in self.StreamParams:
                    if p == key:
                        raise ValueError(
                            f"stream parameter in line {nLine} not allowed in POST "
                            "request"
                        )

                raise ValueError(f"invalid parameter in line {nLine}")

            # stream parameters
            toks = line.split()
            nToks = len(toks)
            if nToks not in (5, 6):
                raise ValueError(f"invalid number of stream components in line {nLine}")

            ro = RequestOptions()

            # net, sta, loc, cha
            ro.channel = RequestOptions.Channel()
            ro.channel.net = toks[0].split(",")
            ro.channel.sta = toks[1].split(",")
            ro.channel.loc = toks[2].split(",")
            ro.channel.cha = toks[3].split(",")

            msg = "invalid %s value in line %i"
            for net in ro.channel.net:
                if ro.ChannelChars(net):
                    raise ValueError(msg % ("network", nLine))
            for sta in ro.channel.sta:
                if ro.ChannelChars(sta):
                    raise ValueError(msg % ("station", nLine))
            for loc in ro.channel.loc:
                if loc != "--" and ro.ChannelChars(loc):
                    raise ValueError(msg % ("location", nLine))
            for cha in ro.channel.cha:
                if ro.ChannelChars(cha):
                    raise ValueError(msg % ("channel", nLine))

            # start/end time
            ro.time = RequestOptions.Time()
            ro.time.start = Time.FromString(toks[4])
            logEnd = "-"
            if len(toks) > 5:
                ro.time.end = Time.FromString(toks[5])
                logEnd = ro.time.end.iso()

            seiscomp.logging.debug(
                f"ro: {ro.channel.net}.{ro.channel.sta}.{ro.channel.loc}."
                f"{ro.channel.cha} {ro.time.start.iso()} {logEnd}"
            )
            self.streams.append(ro)

        if not self.streams:
            raise ValueError("at least one stream line is required")


# vim: ts=4 et
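# Editor's note: a quick numeric check of the bounding-box math in
# Geo.BCircle.calculateBBox() above. For a cap of angular radius r centred at
# latitude phi (pole not inside the cap) the longitude half-width is
# dLon = asin(sin(r) / cos(phi)); at the equator this reduces to dLon = r.
# The values below are illustrative:

if __name__ == "__main__":
    c = RequestOptions.Geo.BCircle()
    c.lat, c.lon, c.minRad, c.maxRad = 45.0, 10.0, 0.0, 5.0
    b = c.calculateBBox()
    assert (b.minLat, b.maxLat) == (40.0, 50.0)  # lat bounds: lat +/- maxRad
    assert abs((b.maxLon - c.lon) - 7.080) < 1e-3  # wider than 5 deg at lat 45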
936
lib/python/seiscomp/fdsnws/station.py
Normal file
@ -0,0 +1,936 @@
################################################################################
# Copyright (C) 2013-2014 gempa GmbH
#
# FDSNStation -- Implements the fdsnws-station Web service, see
# http://www.fdsn.org/webservices/
#
# Feature notes:
#   - 'updatedafter' request parameter not implemented: The last modification
#     time in SeisComP is tracked on the object level. If a child of an object
#     is updated the update time is not propagated to all parents. In order to
#     check if a station was updated all children must be evaluated
#     recursively. This operation would be much too expensive.
#   - additional request parameters:
#     - formatted: boolean, default: false
#   - additional values of request parameters:
#     - format
#       - standard: [xml, text]
#       - additional: [fdsnxml (=xml), stationxml, sc3ml]
#       - default: xml
#
# Author: Stephan Herrnkind
# Email: herrnkind@gempa.de
################################################################################

from twisted.internet.threads import deferToThread
from twisted.web import http, server

import seiscomp.datamodel
import seiscomp.logging
from seiscomp.client import Application
from seiscomp.core import Time
from seiscomp.io import Exporter, ExportObjectList

from .http import BaseResource
from .request import RequestOptions
from . import utils

VERSION = "1.1.6"

################################################################################


class _StationRequestOptions(RequestOptions):
    Exporters = {
        "xml": "fdsnxml",
        "fdsnxml": "fdsnxml",
        "stationxml": "staxml",
        "sc3ml": "trunk",
    }
    MinTime = Time(0, 1)

    VText = ["text"]
    # OutputFormats = list(Exporters) + VText
    # The default format must come first; list(Exporters) has random order
    OutputFormats = ["xml", "fdsnxml", "stationxml", "sc3ml"] + VText

    PLevel = ["level"]
    PIncludeRestricted = ["includerestricted"]
    PIncludeAvailability = ["includeavailability"]
    PUpdateAfter = ["updateafter"]
    PMatchTimeSeries = ["matchtimeseries"]

    # non standard parameters
    PFormatted = ["formatted"]

    POSTParams = (
        RequestOptions.POSTParams
        + RequestOptions.GeoParams
        + PLevel
        + PIncludeRestricted
        + PIncludeAvailability
        + PUpdateAfter
        + PMatchTimeSeries
        + PFormatted
    )
    GETParams = RequestOptions.GETParams + RequestOptions.WindowTimeParams + POSTParams

    # ---------------------------------------------------------------------------
    def __init__(self):
        super().__init__()

        self.service = "fdsnws-station"

        self.includeSta = True
        self.includeCha = False
        self.includeRes = False

        self.restricted = None
        self.availability = None
        self.updatedAfter = None
        self.matchTimeSeries = None

        # non standard parameters
        self.formatted = None

    # ---------------------------------------------------------------------------
    def parse(self):
        self.parseTime(True)
        self.parseChannel()
        self.parseGeo()
        self.parseOutput()

        # level: [network, station, channel, response]
        key, value = self.getFirstValue(self.PLevel)
        if value is not None:
            value = value.lower()
            if value in ("network", "net"):
                self.includeSta = False
            elif value in ("channel", "cha", "chan"):
                self.includeCha = True
            elif value in ("response", "res", "resp"):
                self.includeCha = True
                self.includeRes = True
            elif value not in ("station", "sta"):
                self.raiseValueError(key)

        # includeRestricted (optional)
        self.restricted = self.parseBool(self.PIncludeRestricted)

        # includeAvailability (optional)
        self.availability = self.parseBool(self.PIncludeAvailability)

        # updatedAfter (optional), currently not supported
        self.updatedAfter = self.parseTimeStr(self.PUpdateAfter)

        # matchTimeSeries (optional)
        self.matchTimeSeries = self.parseBool(self.PMatchTimeSeries)

        # formatted XML output (non standard)
        self.formatted = self.parseBool(self.PFormatted)

    # ---------------------------------------------------------------------------
    def networkIter(self, inv, matchTime=False):
        for i in range(inv.networkCount()):
            net = inv.network(i)

            for ro in self.streams:
                # network code
                if ro.channel and not ro.channel.matchNet(net.code()):
                    continue

                # start and end time
                if matchTime and ro.time:
                    try:
                        end = net.end()
                    except ValueError:
                        end = None
                    if not ro.time.match(net.start(), end):
                        continue

                yield net
                break

    # ---------------------------------------------------------------------------
    def stationIter(self, net, matchTime=False):
        for i in range(net.stationCount()):
            sta = net.station(i)

            # geographic location
            if self.geo:
                try:
                    lat = sta.latitude()
                    lon = sta.longitude()
                except ValueError:
                    continue
                if not self.geo.match(lat, lon):
                    continue

            for ro in self.streams:
                # station code
                if ro.channel and (
                    not ro.channel.matchSta(sta.code())
                    or not ro.channel.matchNet(net.code())
                ):
                    continue

                # start and end time
                if matchTime and ro.time:
                    try:
                        end = sta.end()
                    except ValueError:
                        end = None
                    if not ro.time.match(sta.start(), end):
                        continue

                yield sta
                break

    # ---------------------------------------------------------------------------
    def locationIter(self, net, sta, matchTime=False):
        for i in range(sta.sensorLocationCount()):
            loc = sta.sensorLocation(i)

            for ro in self.streams:
                # location code
                if ro.channel and (
                    not ro.channel.matchLoc(loc.code())
                    or not ro.channel.matchSta(sta.code())
                    or not ro.channel.matchNet(net.code())
                ):
                    continue

                # start and end time
                if matchTime and ro.time:
                    try:
                        end = loc.end()
                    except ValueError:
                        end = None
                    if not ro.time.match(loc.start(), end):
                        continue

                yield loc
                break

    # ---------------------------------------------------------------------------
    def streamIter(self, net, sta, loc, matchTime, dac):
        for i in range(loc.streamCount()):
            stream = loc.stream(i)

            for ro in self.streams:
                # stream code
                if ro.channel and (
                    not ro.channel.matchCha(stream.code())
                    or not ro.channel.matchLoc(loc.code())
                    or not ro.channel.matchSta(sta.code())
                    or not ro.channel.matchNet(net.code())
                ):
                    continue

                # start and end time
                if matchTime and ro.time:
                    try:
                        end = stream.end()
                    except ValueError:
                        end = None
                    if not ro.time.match(stream.start(), end):
                        continue

                # match data availability extent
                if dac is not None and self.matchTimeSeries:
                    extent = dac.extent(
                        net.code(), sta.code(), loc.code(), stream.code()
                    )
                    if extent is None or (
                        ro.time and not ro.time.match(extent.start(), extent.end())
                    ):
                        continue

                yield stream
                break
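# Editor's note: the *Iter generators above filter the inventory tree level by
# level; request handlers nest them. A minimal sketch of that chaining, where
# ro is a parsed _StationRequestOptions and inv a seiscomp.datamodel.Inventory
# (the function name is illustrative):

def _iterMatchingStreams(ro, inv, dac=None):
    for net in ro.networkIter(inv, matchTime=True):
        for sta in ro.stationIter(net, matchTime=True):
            for loc in ro.locationIter(net, sta, matchTime=True):
                for stream in ro.streamIter(net, sta, loc, True, dac):
                    yield net, sta, loc, stream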
################################################################################
|
||||
class FDSNStation(BaseResource):
|
||||
isLeaf = True
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def __init__(
|
||||
self,
|
||||
inv,
|
||||
restricted,
|
||||
maxObj,
|
||||
daEnabled,
|
||||
conditionalRequestsEnabled,
|
||||
timeInventoryLoaded,
|
||||
):
|
||||
super().__init__(VERSION)
|
||||
|
||||
self._inv = inv
|
||||
self._allowRestricted = restricted
|
||||
self._maxObj = maxObj
|
||||
self._daEnabled = daEnabled
|
||||
self._conditionalRequestsEnabled = conditionalRequestsEnabled
|
||||
self._timeInventoryLoaded = timeInventoryLoaded.seconds()
|
||||
|
||||
# additional object count dependent on detail level
|
||||
self._resLevelCount = (
|
||||
inv.responsePAZCount()
|
||||
+ inv.responseFIRCount()
|
||||
+ inv.responsePolynomialCount()
|
||||
+ inv.responseIIRCount()
|
||||
+ inv.responseFAPCount()
|
||||
)
|
||||
for i in range(inv.dataloggerCount()):
|
||||
self._resLevelCount += inv.datalogger(i).decimationCount()
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def render_OPTIONS(self, req):
|
||||
req.setHeader("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
|
||||
req.setHeader(
|
||||
"Access-Control-Allow-Headers",
|
||||
"Accept, Content-Type, X-Requested-With, Origin",
|
||||
)
|
||||
req.setHeader("Content-Type", "text/plain; charset=utf-8")
|
||||
return ""
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def render_GET(self, req):
|
||||
# Parse and validate GET parameters
|
||||
ro = _StationRequestOptions()
|
||||
try:
|
||||
ro.parseGET(req.args)
|
||||
ro.parse()
|
||||
# the GET operation supports exactly one stream filter
|
||||
ro.streams.append(ro)
|
||||
except ValueError as e:
|
||||
seiscomp.logging.warning(str(e))
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
|
||||
|
||||
return self._prepareRequest(req, ro)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def render_POST(self, req):
|
||||
# Parse and validate POST parameters
|
||||
ro = _StationRequestOptions()
|
||||
try:
|
||||
ro.parsePOST(req.content)
|
||||
ro.parse()
|
||||
except ValueError as e:
|
||||
seiscomp.logging.warning(str(e))
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
|
||||
|
||||
return self._prepareRequest(req, ro)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def _prepareRequest(self, req, ro):
|
||||
if ro.availability and not self._daEnabled:
|
||||
msg = "including of availability information not supported"
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
|
||||
|
||||
if ro.updatedAfter:
|
||||
msg = "filtering based on update time not supported"
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
|
||||
|
||||
if ro.matchTimeSeries and not self._daEnabled:
|
||||
msg = "filtering based on available time series not supported"
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
|
||||
|
||||
# load data availability if requested
|
||||
dac = None
|
||||
if ro.availability or ro.matchTimeSeries:
|
||||
dac = Application.Instance().getDACache()
|
||||
if dac is None or len(dac.extents()) == 0:
|
||||
msg = "no data availabiltiy extent information found"
|
||||
return self.renderErrorPage(req, http.NO_CONTENT, msg, ro)
|
||||
|
||||
# Exporter, 'None' is used for text output
|
||||
if ro.format in ro.VText:
|
||||
if ro.includeRes:
|
||||
msg = "response level output not available in text format"
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
|
||||
req.setHeader("Content-Type", "text/plain; charset=utf-8")
|
||||
d = deferToThread(self._processRequestText, req, ro, dac)
|
||||
else:
|
||||
exp = Exporter.Create(ro.Exporters[ro.format])
|
||||
if exp is None:
|
||||
msg = (
|
||||
f"output format '{ro.format}' no available, export module "
|
||||
f"'{ro.Exporters[ro.format]}' could not be loaded."
|
||||
)
|
||||
return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
|
||||
|
||||
req.setHeader("Content-Type", "application/xml; charset=utf-8")
|
||||
exp.setFormattedOutput(bool(ro.formatted))
|
||||
d = deferToThread(self._processRequestExp, req, ro, exp, dac)
|
||||
|
||||
req.notifyFinish().addErrback(utils.onCancel, d)
|
||||
d.addBoth(utils.onFinish, req)
|
||||
|
||||
# The request is handled by the deferred object
|
||||
return server.NOT_DONE_YET
|
||||
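# Illustrative flow (descriptive only): the handler runs in Twisted's thread
# pool via deferToThread while the reactor stays responsive; utils.onCancel
# aborts the deferred if the client disconnects, and utils.onFinish closes
# the request from the reactor thread once the worker returns.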
|
||||
# ---------------------------------------------------------------------------
|
||||
def _processRequestExp(self, req, ro, exp, dac):
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return False
|
||||
|
||||
staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0
|
||||
|
||||
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
|
||||
newInv = seiscomp.datamodel.Inventory()
|
||||
dataloggers, sensors, extents = set(), set(), {}
|
||||
|
||||
skipRestricted = not self._allowRestricted or (
|
||||
ro.restricted is not None and not ro.restricted
|
||||
)
|
||||
levelNet = not ro.includeSta
|
||||
levelSta = ro.includeSta and not ro.includeCha
|
||||
|
||||
isConditionalRequest = self._isConditionalRequest(req)
|
||||
|
||||
# iterate over inventory networks
|
||||
for net in ro.networkIter(self._inv, levelNet):
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return False
|
||||
if skipRestricted and utils.isRestricted(net):
|
||||
continue
|
||||
newNet = seiscomp.datamodel.Network(net)
|
||||
|
||||
# Copy comments
|
||||
for i in range(net.commentCount()):
|
||||
newNet.add(seiscomp.datamodel.Comment(net.comment(i)))
|
||||
|
||||
# iterate over inventory stations of current network
|
||||
for sta in ro.stationIter(net, levelSta):
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return False
|
||||
if skipRestricted and utils.isRestricted(sta):
|
||||
continue
|
||||
if not self.checkObjects(req, objCount, self._maxObj):
|
||||
return False
|
||||
|
||||
if ro.includeCha:
|
||||
numCha, numLoc, d, s, e = self._processStation(
|
||||
newNet, net, sta, ro, dac, skipRestricted, isConditionalRequest
|
||||
)
|
||||
if numCha > 0:
|
||||
if isConditionalRequest:
|
||||
self.returnNotModified(req, ro)
|
||||
return True
|
||||
locCount += numLoc
|
||||
chaCount += numCha
|
||||
extCount += len(e)
|
||||
objCount += numLoc + numCha + extCount
|
||||
if not self.checkObjects(req, objCount, self._maxObj):
|
||||
return False
|
||||
dataloggers |= d
|
||||
sensors |= s
|
||||
for k, v in e.items():
|
||||
if k not in extents:
|
||||
extents[k] = v
|
||||
elif self._matchStation(net, sta, ro, dac):
|
||||
if isConditionalRequest:
|
||||
self.returnNotModified(req, ro)
|
||||
return True
|
||||
if ro.includeSta:
|
||||
newSta = seiscomp.datamodel.Station(sta)
|
||||
# Copy comments
|
||||
for i in range(sta.commentCount()):
|
||||
newSta.add(seiscomp.datamodel.Comment(sta.comment(i)))
|
||||
newNet.add(newSta)
|
||||
else:
|
||||
# no station output requested: one matching station
|
||||
# is sufficient to include the network
|
||||
newInv.add(newNet)
|
||||
objCount += 1
|
||||
break
|
||||
|
||||
if newNet.stationCount() > 0:
|
||||
newInv.add(newNet)
|
||||
staCount += newNet.stationCount()
|
||||
objCount += staCount + 1
|
||||
|
||||
# Return 204 if no matching inventory was found
|
||||
if newInv.networkCount() == 0:
|
||||
msg = "no matching inventory found"
|
||||
self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
|
||||
return True
|
||||
|
||||
if self._conditionalRequestsEnabled:
|
||||
req.setHeader(
|
||||
"Last-Modified", http.datetimeToString(self._timeInventoryLoaded)
|
||||
)
|
||||
|
||||
# Copy references (dataloggers, responses, sensors)
|
||||
decCount, resCount = 0, 0
|
||||
if ro.includeCha:
|
||||
decCount = self._copyReferences(
|
||||
newInv, req, objCount, self._inv, ro, dataloggers, sensors, self._maxObj
|
||||
)
|
||||
if decCount is None:
|
||||
return False
|
||||
|
||||
resCount = (
|
||||
newInv.responsePAZCount()
|
||||
+ newInv.responseFIRCount()
|
||||
+ newInv.responsePolynomialCount()
|
||||
+ newInv.responseFAPCount()
|
||||
+ newInv.responseIIRCount()
|
||||
)
|
||||
objCount += (
|
||||
resCount + decCount + newInv.dataloggerCount() + newInv.sensorCount()
|
||||
)
|
||||
|
||||
# Copy data extents
|
||||
objOut = newInv
|
||||
if len(extents) > 0:
|
||||
objCount += 1
|
||||
da = seiscomp.datamodel.DataAvailability()
|
||||
for k, v in extents.items():
|
||||
objCount += 1
|
||||
da.add(seiscomp.datamodel.DataExtent(v))
|
||||
objOut = ExportObjectList()
|
||||
objOut.append(newInv)
|
||||
objOut.append(da)
|
||||
|
||||
sink = utils.Sink(req)
|
||||
if not exp.write(sink, objOut):
|
||||
return False
|
||||
|
||||
seiscomp.logging.debug(
|
||||
f"{ro.service}: returned {newInv.networkCount()}Net, {staCount}Sta, "
|
||||
f"{locCount}Loc, {chaCount}Cha, {newInv.dataloggerCount()}DL, "
|
||||
f"{decCount}Dec, {newInv.sensorCount()}Sen, {resCount}Res, {extCount}DAExt "
|
||||
f"(total objects/bytes: {objCount}/{sink.written})"
|
||||
)
|
||||
utils.accessLog(req, ro, http.OK, sink.written, None)
|
||||
return True
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
@staticmethod
|
||||
def _formatEpoch(obj):
|
||||
df = "%FT%T"
|
||||
dfMS = "%FT%T.%f"
|
||||
|
||||
if obj.start().microseconds() > 0:
|
||||
start = obj.start().toString(dfMS)
|
||||
else:
|
||||
start = obj.start().toString(df)
|
||||
|
||||
try:
|
||||
if obj.end().microseconds() > 0:
|
||||
end = obj.end().toString(dfMS)
|
||||
else:
|
||||
end = obj.end().toString(df)
|
||||
except ValueError:
|
||||
end = ""
|
||||
|
||||
return start, end
|
||||
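# Example (illustrative): an epoch starting 2024-01-01 00:00:00 with zero
# microseconds and an open end yields ("2024-01-01T00:00:00", ""); non-zero
# microseconds switch the output to the "%FT%T.%f" format.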
|
||||
# ---------------------------------------------------------------------------
|
||||
def _processRequestText(self, req, ro, dac):
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return False
|
||||
|
||||
skipRestricted = not self._allowRestricted or (
|
||||
ro.restricted is not None and not ro.restricted
|
||||
)
|
||||
isConditionalRequest = self._isConditionalRequest(req)
|
||||
|
||||
data = ""
|
||||
lines = []
|
||||
|
||||
# level = network
|
||||
if not ro.includeSta:
|
||||
data = "#Network|Description|StartTime|EndTime|TotalStations\n"
|
||||
|
||||
# iterate over inventory networks
|
||||
for net in ro.networkIter(self._inv, True):
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return False
|
||||
if skipRestricted and utils.isRestricted(net):
|
||||
continue
|
||||
|
||||
# at least one matching station is required
|
||||
stationFound = False
|
||||
for sta in ro.stationIter(net, False):
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return False
|
||||
if self._matchStation(net, sta, ro, dac) and not (
|
||||
skipRestricted and utils.isRestricted(sta)
|
||||
):
|
||||
stationFound = True
|
||||
break
|
||||
if not stationFound:
|
||||
continue
|
||||
if isConditionalRequest:
|
||||
self.returnNotModified(req, ro)
|
||||
return True
|
||||
|
||||
start, end = self._formatEpoch(net)
|
||||
lines.append(
|
||||
(
|
||||
f"{net.code()} {start}",
|
||||
f"{net.code()}|{net.description()}|{start}|{end}|"
|
||||
f"{net.stationCount()}\n",
|
||||
)
|
||||
)
|
||||
|
||||
# level = station
|
||||
elif not ro.includeCha:
|
||||
data = (
|
||||
"#Network|Station|Latitude|Longitude|Elevation|"
|
||||
"SiteName|StartTime|EndTime\n"
|
||||
)
|
||||
|
||||
# iterate over inventory networks
|
||||
for net in ro.networkIter(self._inv, False):
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return False
|
||||
if skipRestricted and utils.isRestricted(net):
|
||||
continue
|
||||
# iterate over inventory stations
|
||||
for sta in ro.stationIter(net, True):
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return False
|
||||
if not self._matchStation(net, sta, ro, dac) or (
|
||||
skipRestricted and utils.isRestricted(sta)
|
||||
):
|
||||
continue
|
||||
if isConditionalRequest:
|
||||
self.returnNotModified(req, ro)
|
||||
return True
|
||||
|
||||
try:
|
||||
lat = str(sta.latitude())
|
||||
except ValueError:
|
||||
lat = ""
|
||||
try:
|
||||
lon = str(sta.longitude())
|
||||
except ValueError:
|
||||
lon = ""
|
||||
try:
|
||||
elev = str(sta.elevation())
|
||||
except ValueError:
|
||||
elev = ""
|
||||
try:
|
||||
desc = sta.description()
|
||||
except ValueError:
|
||||
desc = ""
|
||||
|
||||
start, end = self._formatEpoch(sta)
|
||||
lines.append(
|
||||
(
|
||||
f"{net.code()}.{sta.code()} {start}",
|
||||
f"{net.code()}|{sta.code()}|{lat}|{lon}|{elev}|{desc}|"
|
||||
f"{start}|{end}\n",
|
||||
)
|
||||
)
|
||||
|
||||
# level = channel (response level output not supported in text format)
|
||||
else:
|
||||
data = (
|
||||
"#Network|Station|Location|Channel|Latitude|Longitude|"
|
||||
"Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|"
|
||||
"ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime\n"
|
||||
)
|
||||
|
||||
# iterate over inventory networks
|
||||
for net in ro.networkIter(self._inv, False):
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return False
|
||||
if skipRestricted and utils.isRestricted(net):
|
||||
continue
|
||||
# iterate over inventory stations, locations, streams
|
||||
for sta in ro.stationIter(net, False):
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return False
|
||||
if skipRestricted and utils.isRestricted(sta):
|
||||
continue
|
||||
for loc in ro.locationIter(net, sta, True):
|
||||
for stream in ro.streamIter(net, sta, loc, True, dac):
|
||||
if skipRestricted and utils.isRestricted(stream):
|
||||
continue
|
||||
if isConditionalRequest:
|
||||
self.returnNotModified(req, ro)
|
||||
return True
|
||||
|
||||
try:
|
||||
lat = str(loc.latitude())
|
||||
except ValueError:
|
||||
lat = ""
|
||||
try:
|
||||
lon = str(loc.longitude())
|
||||
except ValueError:
|
||||
lon = ""
|
||||
try:
|
||||
elev = str(loc.elevation())
|
||||
except ValueError:
|
||||
elev = ""
|
||||
try:
|
||||
depth = str(stream.depth())
|
||||
except ValueError:
|
||||
depth = ""
|
||||
try:
|
||||
azi = str(stream.azimuth())
|
||||
except ValueError:
|
||||
azi = ""
|
||||
try:
|
||||
dip = str(stream.dip())
|
||||
except ValueError:
|
||||
dip = ""
|
||||
|
||||
desc = ""
|
||||
try:
|
||||
sensor = self._inv.findSensor(stream.sensor())
|
||||
if sensor is not None:
|
||||
desc = sensor.description()
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
try:
|
||||
scale = str(stream.gain())
|
||||
except ValueError:
|
||||
scale = ""
|
||||
try:
|
||||
scaleFreq = str(stream.gainFrequency())
|
||||
except ValueError:
|
||||
scaleFreq = ""
|
||||
try:
|
||||
scaleUnit = str(stream.gainUnit())
|
||||
except ValueError:
|
||||
scaleUnit = ""
|
||||
try:
|
||||
sr = str(
|
||||
stream.sampleRateNumerator()
|
||||
/ stream.sampleRateDenominator()
|
||||
)
|
||||
except (ValueError, ZeroDivisionError):
|
||||
sr = ""
|
||||
|
||||
start, end = self._formatEpoch(stream)
|
||||
lines.append(
|
||||
(
|
||||
f"{net.code()}.{sta.code()}.{loc.code()}."
|
||||
f"{stream.code()} {start}",
|
||||
f"{net.code()}|{sta.code()}|{loc.code()}|"
|
||||
f"{stream.code()}|{lat}|{lon}|{elev}|{depth}|{azi}|"
|
||||
f"{dip}|{desc}|{scale}|{scaleFreq}|{scaleUnit}|"
|
||||
f"{sr}|{start}|{end}\n",
|
||||
)
|
||||
)
|
||||
|
||||
# sort lines and append to final data string
|
||||
lines.sort(key=lambda line: line[0])
|
||||
for line in lines:
|
||||
data += line[1]
|
||||
|
||||
# Return 204 if no matching inventory was found
|
||||
if len(lines) == 0:
|
||||
msg = "no matching inventory found"
|
||||
self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
|
||||
return False
|
||||
|
||||
if self._conditionalRequestsEnabled:
|
||||
req.setHeader(
|
||||
"Last-Modified", http.datetimeToString(self._timeInventoryLoaded)
|
||||
)
|
||||
|
||||
dataBin = utils.b_str(data)
|
||||
utils.writeTSBin(req, dataBin)
|
||||
seiscomp.logging.debug(
|
||||
f"{ro.service}: returned {len(lines)} lines (total bytes: {len(dataBin)})"
|
||||
)
|
||||
utils.accessLog(req, ro, http.OK, len(dataBin), None)
|
||||
return True
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
def _isConditionalRequest(self, req):
|
||||
# support for time-based conditional requests
|
||||
if not self._conditionalRequestsEnabled:
|
||||
return False
|
||||
if req.method not in (b"GET", b"HEAD"):
|
||||
return False
|
||||
if req.getHeader("If-None-Match") is not None:
|
||||
return False
|
||||
|
||||
modifiedSince = req.getHeader("If-Modified-Since")
|
||||
if not modifiedSince:
|
||||
return False
|
||||
|
||||
modifiedSince = utils.stringToDatetime(modifiedSince)
|
||||
return modifiedSince and self._timeInventoryLoaded <= modifiedSince
|
||||
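# Illustrative exchange (header value hypothetical): a client repeating a
# query may send
#   If-Modified-Since: Mon, 06 Jan 2025 10:00:00 GMT
# and, if the inventory was loaded at or before that time, receives a
# "304 Not Modified" response instead of the full document.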
|
||||
# ---------------------------------------------------------------------------
|
||||
# Checks if at least one location and channel combination matches the
|
||||
# request options
|
||||
@staticmethod
|
||||
def _matchStation(net, sta, ro, dac):
|
||||
# No filter: return true immediately
|
||||
if dac is None and (
|
||||
not ro.channel or (not ro.channel.loc and not ro.channel.cha)
|
||||
):
|
||||
return True
|
||||
|
||||
for loc in ro.locationIter(net, sta, False):
|
||||
if dac is None and not ro.channel.cha and not ro.time:
|
||||
return True
|
||||
|
||||
for _ in ro.streamIter(net, sta, loc, False, dac):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Adds a deep copy of the specified station to the new network if the
|
||||
# location and channel combination matches the request options (if any)
|
||||
@staticmethod
|
||||
def _processStation(
|
||||
newNet, net, sta, ro, dac, skipRestricted, isConditionalRequest
|
||||
):
|
||||
chaCount = 0
|
||||
dataloggers, sensors, extents = set(), set(), {}
|
||||
newSta = seiscomp.datamodel.Station(sta)
|
||||
includeAvailability = dac is not None and ro.availability
|
||||
|
||||
# Copy comments
|
||||
for i in range(sta.commentCount()):
|
||||
newSta.add(seiscomp.datamodel.Comment(sta.comment(i)))
|
||||
|
||||
for loc in ro.locationIter(net, sta, True):
|
||||
newLoc = seiscomp.datamodel.SensorLocation(loc)
|
||||
# Copy comments
|
||||
for i in range(loc.commentCount()):
|
||||
newLoc.add(seiscomp.datamodel.Comment(loc.comment(i)))
|
||||
|
||||
for stream in ro.streamIter(net, sta, loc, True, dac):
|
||||
if skipRestricted and utils.isRestricted(stream):
|
||||
continue
|
||||
if isConditionalRequest:
|
||||
return 1, 1, [], [], []
|
||||
newCha = seiscomp.datamodel.Stream(stream)
|
||||
# Copy comments
|
||||
for i in range(stream.commentCount()):
|
||||
newCha.add(seiscomp.datamodel.Comment(stream.comment(i)))
|
||||
newLoc.add(newCha)
|
||||
dataloggers.add(stream.datalogger())
|
||||
sensors.add(stream.sensor())
|
||||
if includeAvailability:
|
||||
ext = dac.extent(net.code(), sta.code(), loc.code(), stream.code())
|
||||
if ext is not None and ext.publicID() not in extents:
|
||||
extents[ext.publicID()] = ext
|
||||
|
||||
if newLoc.streamCount() > 0:
|
||||
newSta.add(newLoc)
|
||||
chaCount += newLoc.streamCount()
|
||||
|
||||
if newSta.sensorLocationCount() > 0:
|
||||
newNet.add(newSta)
|
||||
return chaCount, newSta.sensorLocationCount(), dataloggers, sensors, extents
|
||||
|
||||
return 0, 0, [], [], []
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Copy references (data loggers, sensors, responses) depending on request
|
||||
# options
|
||||
def _copyReferences(
|
||||
self, newInv, req, objCount, inv, ro, dataloggers, sensors, maxObj
|
||||
):
|
||||
responses = set()
|
||||
decCount = 0
|
||||
|
||||
# datalogger
|
||||
for i in range(inv.dataloggerCount()):
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return None
|
||||
logger = inv.datalogger(i)
|
||||
if logger.publicID() not in dataloggers:
|
||||
continue
|
||||
newLogger = seiscomp.datamodel.Datalogger(logger)
|
||||
newInv.add(newLogger)
|
||||
# decimations are only needed for responses
|
||||
if ro.includeRes:
|
||||
for j in range(logger.decimationCount()):
|
||||
decimation = logger.decimation(j)
|
||||
newLogger.add(seiscomp.datamodel.Decimation(decimation))
|
||||
|
||||
# collect response ids
|
||||
filterStr = ""
|
||||
try:
|
||||
filterStr = f"{decimation.analogueFilterChain().content()} "
|
||||
except ValueError:
|
||||
pass
|
||||
try:
|
||||
filterStr += decimation.digitalFilterChain().content()
|
||||
except ValueError:
|
||||
pass
|
||||
for resp in filterStr.split():
|
||||
responses.add(resp)
|
||||
decCount += newLogger.decimationCount()
|
||||
|
||||
objCount += newInv.dataloggerCount() + decCount
|
||||
resCount = len(responses)
|
||||
if not self.checkObjects(req, objCount + resCount, maxObj):
|
||||
return None
|
||||
|
||||
# sensor
|
||||
for i in range(inv.sensorCount()):
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return None
|
||||
sensor = inv.sensor(i)
|
||||
if sensor.publicID() not in sensors:
|
||||
continue
|
||||
newSensor = seiscomp.datamodel.Sensor(sensor)
|
||||
newInv.add(newSensor)
|
||||
resp = newSensor.response()
|
||||
if resp:
|
||||
if ro.includeRes:
|
||||
responses.add(resp)
|
||||
else:
|
||||
# no responses: remove response reference to avoid missing
|
||||
# response warning of exporter
|
||||
newSensor.setResponse("")
|
||||
|
||||
objCount += newInv.sensorCount()
|
||||
resCount = len(responses)
|
||||
if not self.checkObjects(req, objCount + resCount, maxObj):
|
||||
return None
|
||||
|
||||
# responses
|
||||
if ro.includeRes:
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return None
|
||||
for i in range(inv.responsePAZCount()):
|
||||
resp = inv.responsePAZ(i)
|
||||
if resp.publicID() in responses:
|
||||
newInv.add(seiscomp.datamodel.ResponsePAZ(resp))
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return None
|
||||
for i in range(inv.responseFIRCount()):
|
||||
resp = inv.responseFIR(i)
|
||||
if resp.publicID() in responses:
|
||||
newInv.add(seiscomp.datamodel.ResponseFIR(resp))
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return None
|
||||
for i in range(inv.responsePolynomialCount()):
|
||||
resp = inv.responsePolynomial(i)
|
||||
if resp.publicID() in responses:
|
||||
newInv.add(seiscomp.datamodel.ResponsePolynomial(resp))
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return None
|
||||
for i in range(inv.responseFAPCount()):
|
||||
resp = inv.responseFAP(i)
|
||||
if resp.publicID() in responses:
|
||||
newInv.add(seiscomp.datamodel.ResponseFAP(resp))
|
||||
if req._disconnected: # pylint: disable=W0212
|
||||
return None
|
||||
for i in range(inv.responseIIRCount()):
|
||||
resp = inv.responseIIR(i)
|
||||
if resp.publicID() in responses:
|
||||
newInv.add(seiscomp.datamodel.ResponseIIR(resp))
|
||||
|
||||
return decCount
|
||||
|
||||
|
||||
# vim: ts=4 et
|
||||
201
lib/python/seiscomp/fdsnws/utils.py
Normal file
@ -0,0 +1,201 @@
|
||||
################################################################################
|
||||
# Copyright (C) 2013-2014 gempa GmbH
|
||||
#
|
||||
# Common utility functions
|
||||
#
|
||||
# Author: Stephan Herrnkind
|
||||
# Email: herrnkind@gempa.de
|
||||
################################################################################
|
||||
|
||||
import socket
|
||||
import traceback
|
||||
|
||||
import twisted
|
||||
|
||||
from twisted.internet import reactor, defer
|
||||
from twisted.python.failure import Failure
|
||||
from twisted.web import http
|
||||
|
||||
|
||||
import seiscomp.logging
|
||||
import seiscomp.core
|
||||
import seiscomp.io
|
||||
from seiscomp.client import Application
|
||||
|
||||
twisted_version = (twisted.version.major, twisted.version.minor, twisted.version.micro)
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Converts a unicode string to a byte string
|
||||
def b_str(unicode_string):
|
||||
return unicode_string.encode("utf-8", "replace")
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Converts a byte string to a unicode string
|
||||
def u_str(byte_string):
|
||||
return byte_string.decode("utf-8", "replace")
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Tests if a SC3 inventory object is restricted
|
||||
def isRestricted(obj):
|
||||
try:
|
||||
return obj.restricted()
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Thread-safe write of string data using reactor main thread
|
||||
def writeTS(req, data):
|
||||
reactor.callFromThread(req.write, b_str(data))
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Thread-safe write of binary data using reactor main thread
|
||||
def writeTSBin(req, data):
|
||||
reactor.callFromThread(req.write, data)
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Finish requests deferred to threads
|
||||
def onFinish(result, req):
|
||||
seiscomp.logging.debug(f"finish value = {str(result)}")
|
||||
if isinstance(result, Failure):
|
||||
err = result.value
|
||||
if isinstance(err, defer.CancelledError):
|
||||
seiscomp.logging.error("request canceled")
|
||||
return
|
||||
seiscomp.logging.error(
|
||||
f"{result.getErrorMessage()} "
|
||||
f"{traceback.format_tb(result.getTracebackObject())}"
|
||||
)
|
||||
else:
|
||||
if result:
|
||||
seiscomp.logging.debug("request successfully served")
|
||||
else:
|
||||
seiscomp.logging.debug("request failed")
|
||||
|
||||
reactor.callFromThread(req.finish)
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Handle connection errors
|
||||
def onCancel(failure, req):
|
||||
if failure:
|
||||
seiscomp.logging.error(
|
||||
f"{failure.getErrorMessage()} "
|
||||
f"{traceback.format_tb(failure.getTracebackObject())}"
|
||||
)
|
||||
else:
|
||||
seiscomp.logging.error("request canceled")
|
||||
req.cancel()
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Handle premature connection reset
|
||||
def onResponseFailure(_, call):
|
||||
seiscomp.logging.error("response canceled")
|
||||
call.cancel()
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Writes an entry to the access log, if access logging is enabled
|
||||
def accessLog(req, ro, code, length, err):
|
||||
logger = Application.Instance()._accessLog # pylint: disable=W0212
|
||||
if logger is None:
|
||||
return
|
||||
|
||||
logger.log(AccessLogEntry(req, ro, code, length, err))
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Compatibility function for the stringToDatetime() change in Twisted 24.7, see
|
||||
# https://github.com/twisted/twisted/commit/731e370dfc5d2f7224dc1e12931ddf5c51b211a6
|
||||
def stringToDatetime(dateString):
|
||||
if twisted_version < (24, 7):
|
||||
return http.stringToDatetime(dateString)
|
||||
|
||||
# Since version 24.7 the argument needs to be a byte string
|
||||
return http.stringToDatetime(dateString.encode("ascii"))
|
||||
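# Example (illustrative):
#
#   stringToDatetime("Sun, 06 Nov 1994 08:49:37 GMT")
#   # -> seconds since the epoch, on both old and new Twisted versions;
#   # on Twisted >= 24.7 the required byte-string encoding is applied here.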
|
||||
|
||||
################################################################################
|
||||
class Sink(seiscomp.io.ExportSink):
|
||||
def __init__(self, request):
|
||||
super().__init__()
|
||||
|
||||
self.request = request
|
||||
self.written = 0
|
||||
|
||||
def write(self, data):
|
||||
if self.request._disconnected: # pylint: disable=W0212
|
||||
return -1
|
||||
|
||||
writeTSBin(self.request, data)
|
||||
self.written += len(data)
|
||||
return len(data)
|
||||
|
||||
|
||||
################################################################################
|
||||
class AccessLogEntry:
|
||||
def __init__(self, req, ro, code, length, err):
|
||||
# user agent
|
||||
agent = req.getHeader("User-Agent")
|
||||
if agent is None:
|
||||
agent = ""
|
||||
else:
|
||||
agent = agent[:100].replace("|", " ")
|
||||
|
||||
if err is None:
|
||||
err = ""
|
||||
|
||||
service, user, accessTime, procTime = "", "", "", 0
|
||||
net, sta, loc, cha = "", "", "", ""
|
||||
if ro is not None:
|
||||
# processing time in milliseconds
|
||||
procTime = int((seiscomp.core.Time.GMT() - ro.accessTime).length() * 1000.0)
|
||||
|
||||
service = ro.service
|
||||
if ro.userName is not None:
|
||||
user = ro.userName
|
||||
accessTime = str(ro.accessTime)
|
||||
|
||||
if ro.channel is not None:
|
||||
if ro.channel.net is not None:
|
||||
net = ",".join(ro.channel.net)
|
||||
if ro.channel.sta is not None:
|
||||
sta = ",".join(ro.channel.sta)
|
||||
if ro.channel.loc is not None:
|
||||
loc = ",".join(ro.channel.loc)
|
||||
if ro.channel.cha is not None:
|
||||
cha = ",".join(ro.channel.cha)
|
||||
|
||||
# The host name of the client is resolved in the __str__ method by the
|
||||
# logging thread so that a long-running DNS reverse lookup does not slow
|
||||
# down the request
|
||||
self.msgPrefix = f"{service}|{u_str(req.getRequestHostname())}|{accessTime}|"
|
||||
|
||||
xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
|
||||
if xff:
|
||||
self.userIP = xff[0].split(",")[0].strip()
|
||||
else:
|
||||
self.userIP = req.getClientIP()
|
||||
|
||||
self.clientIP = req.getClientIP()
|
||||
self.msgSuffix = (
|
||||
f"|{self.clientIP}|{length}|{procTime}|{err}|{agent}|{code}|{user}|{net}"
|
||||
f"|{sta}|{loc}|{cha}||"
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
try:
|
||||
userHost = socket.gethostbyaddr(self.userIP)[0]
|
||||
except socket.herror:
|
||||
userHost = self.userIP
|
||||
return self.msgPrefix + userHost + self.msgSuffix
|
||||
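# Resulting line layout (schematic, derived from msgPrefix/msgSuffix above):
#   service|requestHost|accessTime|userHost|clientIP|length|procTime|err|
#   agent|code|user|net|sta|loc|cha||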
|
||||
|
||||
# vim: ts=4 et
|
||||
1275
lib/python/seiscomp/geo.py
Normal file
File diff suppressed because it is too large
2532
lib/python/seiscomp/io.py
Normal file
File diff suppressed because it is too large
386
lib/python/seiscomp/kernel.py
Normal file
@ -0,0 +1,386 @@
|
||||
############################################################################
|
||||
# Copyright (C) by gempa GmbH, GFZ Potsdam #
|
||||
# #
|
||||
# You can redistribute and/or modify this program under the #
|
||||
# terms of the SeisComP Public License. #
|
||||
# #
|
||||
# This program is distributed in the hope that it will be useful, #
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
|
||||
# SeisComP Public License for more details. #
|
||||
############################################################################
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import string
|
||||
import subprocess
|
||||
import seiscomp.config
|
||||
|
||||
|
||||
class Template(string.Template):
|
||||
idpattern = r'[_a-z][_a-z0-9.]*'
|
||||
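# With the extended idpattern above, dotted names are valid placeholders,
# e.g. (illustrative):
#
#   Template("$module.name").substitute({"module.name": "fdsnws"})
#   # -> "fdsnws"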
|
||||
|
||||
class Environment(seiscomp.config.Config):
|
||||
def __init__(self, rootPath):
|
||||
seiscomp.config.Config.__init__(self)
|
||||
self.SEISCOMP_ROOT = rootPath
|
||||
try:
|
||||
self.home_dir = os.environ["HOME"]
|
||||
except KeyError:
|
||||
self.home_dir = "."
|
||||
|
||||
try:
|
||||
self.local_config_dir = os.environ["SEISCOMP_LOCAL_CONFIG"]
|
||||
except KeyError:
|
||||
self.local_config_dir = os.path.join(self.home_dir, ".seiscomp")
|
||||
|
||||
self.root = rootPath
|
||||
self.bin_dir = os.path.join(self.root, "bin")
|
||||
self.data_dir = os.path.join(self.root, "share")
|
||||
self.etc_dir = os.path.join(self.root, "etc")
|
||||
self.etc_defaults_dir = os.path.join(self.root, "etc", "defaults")
|
||||
self.descriptions_dir = os.path.join(self.root, "etc", "descriptions")
|
||||
self.key_dir = os.path.join(self.root, "etc", "key")
|
||||
self.var_dir = os.path.join(self.root, "var")
|
||||
self.log_dir = os.path.join(self.local_config_dir, "log")
|
||||
self.cwd = None
|
||||
self.last_template_file = None
|
||||
|
||||
self._csv = False
|
||||
self._readConfig()
|
||||
|
||||
os.environ["SEISCOMP_ROOT"] = self.SEISCOMP_ROOT
|
||||
|
||||
# Add LD_LIBRARY_PATH and PATH to OS environment
|
||||
LD_LIBRARY_PATH = os.path.join(self.SEISCOMP_ROOT, "lib")
|
||||
BIN_PATH = os.path.join(self.SEISCOMP_ROOT, "bin")
|
||||
SBIN_PATH = os.path.join(self.SEISCOMP_ROOT, "sbin")
|
||||
PATH = BIN_PATH + ":" + SBIN_PATH
|
||||
PYTHONPATH = os.path.join(self.SEISCOMP_ROOT, "lib", "python")
|
||||
try:
|
||||
LD_LIBRARY_PATH = os.environ["LD_LIBRARY_PATH"] + \
|
||||
":" + LD_LIBRARY_PATH
|
||||
except KeyError:
|
||||
pass
|
||||
os.environ["LD_LIBRARY_PATH"] = LD_LIBRARY_PATH
|
||||
try:
|
||||
PATH = PATH + ":" + os.environ["PATH"]
|
||||
except KeyError:
|
||||
pass
|
||||
os.environ["PATH"] = PATH
|
||||
try:
|
||||
PYTHONPATH = os.environ["PYTHONPATH"] + ":" + PYTHONPATH
|
||||
except KeyError:
|
||||
pass
|
||||
os.environ["PYTHONPATH"] = PYTHONPATH
|
||||
|
||||
# Create required directories
|
||||
try:
|
||||
os.makedirs(os.path.join(self.root, "var", "log"))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
try:
|
||||
os.makedirs(os.path.join(self.root, "var", "run"))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def _readConfig(self):
|
||||
self.syslog = False
|
||||
|
||||
# Read configuration file
|
||||
kernelCfg = os.path.join(self.root, "etc", "kernel.cfg")
|
||||
if not self.readConfig(kernelCfg):
|
||||
return
|
||||
|
||||
try:
|
||||
self.syslog = self.getBool("syslog")
|
||||
except:
|
||||
pass
|
||||
|
||||
# Changes into the SEISCOMP_ROOT directory
|
||||
def chroot(self):
|
||||
if self.root:
|
||||
# Remember current directory
|
||||
self.cwd = os.getcwd()
|
||||
os.chdir(self.SEISCOMP_ROOT)
|
||||
self.root = ""
|
||||
|
||||
# Changes back to the current workdir
|
||||
def chback(self):
|
||||
if self.cwd:
|
||||
os.chdir(self.cwd)
|
||||
self.cwd = None
|
||||
self.root = self.SEISCOMP_ROOT
|
||||
|
||||
def resolvePath(self, path):
|
||||
return path.replace("@LOGDIR@", self.log_dir)\
|
||||
.replace("@CONFIGDIR@", self.local_config_dir)\
|
||||
.replace("@DEFAULTCONFIGDIR@", self.etc_defaults_dir)\
|
||||
.replace("@SYSTEMCONFIGDIR@", self.etc_dir)\
|
||||
.replace("@ROOTDIR@", self.root)\
|
||||
.replace("@DATADIR@", self.data_dir)\
|
||||
.replace("@KEYDIR@", self.key_dir)\
|
||||
.replace("@HOMEDIR@", self.home_dir)
|
||||
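# Example (illustrative, assuming SEISCOMP_ROOT=/home/sysop/seiscomp):
#
#   env.resolvePath("@ROOTDIR@/var/log")
#   # -> "/home/sysop/seiscomp/var/log"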
|
||||
def setCSVOutput(self, csv):
|
||||
self._csv = csv
|
||||
|
||||
def enableModule(self, name):
|
||||
runFile = os.path.join(self.root, "etc", "init", name + ".auto")
|
||||
if os.path.exists(runFile):
|
||||
print("%s is already enabled" % name)
|
||||
return 0
|
||||
try:
|
||||
open(runFile, 'w').close()
|
||||
print("enabled %s" % name)
|
||||
return 0
|
||||
except Exception as exc:
|
||||
sys.stderr.write(str(exc) + "\n")
|
||||
sys.stderr.flush()
|
||||
return 0
|
||||
|
||||
def disableModule(self, name):
|
||||
runFile = os.path.join(self.root, "etc", "init", name + ".auto")
|
||||
if not os.path.exists(runFile):
|
||||
print("%s is not enabled" % name)
|
||||
return 0
|
||||
try:
|
||||
os.remove(runFile)
|
||||
print("disabled %s" % name)
|
||||
except Exception as exc:
|
||||
sys.stderr.write(str(exc) + "\n")
|
||||
sys.stderr.flush()
|
||||
return 0
|
||||
|
||||
def isModuleEnabled(self, name):
|
||||
runFile = os.path.join(self.root, "etc", "init", name + ".auto")
|
||||
return os.path.exists(runFile)
|
||||
|
||||
# Return the module name from a path
|
||||
def moduleName(self, path):
|
||||
return os.path.splitext(os.path.basename(path))[0]
|
||||
|
||||
# Returns a module's lockfile
|
||||
def lockFile(self, module):
|
||||
return os.path.join(self.root, "var", "run", module + ".pid")
|
||||
|
||||
# Returns a module's runfile
|
||||
def runFile(self, module):
|
||||
return os.path.join(self.root, "var", "run", module + ".run")
|
||||
|
||||
# Returns a module's logfile
|
||||
def logFile(self, module):
|
||||
return os.path.join(self.root, "var", "log", module + ".log")
|
||||
|
||||
# Returns the binary file path of a given module name
|
||||
def binaryFile(self, module):
|
||||
# return os.path.join(self.root, "bin/" + module)
|
||||
return module
|
||||
|
||||
def start(self, module, binary, params, nohup=False):
|
||||
cmd = binary + " " + params + " >" + self.logFile(module) + " 2>&1"
|
||||
if nohup:
|
||||
cmd = "nohup " + cmd + " &"
|
||||
return os.system(cmd)
|
||||
|
||||
def stop(self, module, timeout):
|
||||
return self.killWait(module, timeout)
|
||||
|
||||
def tryLock(self, module, timeout = None):
|
||||
if timeout is None:
|
||||
return subprocess.call("trylock " + self.lockFile(module), shell=True) == 0
|
||||
else:
|
||||
try:
|
||||
timeoutSeconds = int(timeout)
|
||||
except:
|
||||
print("Invalid timeout parameter, expected positive integer")
|
||||
raise
|
||||
return subprocess.call("waitlock %d \"%s\"" % (timeoutSeconds, self.lockFile(module)), shell=True) == 0
|
||||
|
||||
def killWait(self, module, timeout):
|
||||
lockfile = self.lockFile(module)
|
||||
|
||||
# Open pid file
|
||||
f = open(lockfile, "r")
|
||||
|
||||
# Try to read the pid
|
||||
try:
|
||||
pid = int(f.readline())
|
||||
except:
|
||||
f.close()
|
||||
raise
|
||||
|
||||
# Kill process with pid
|
||||
subprocess.call("kill %d" % pid, shell=True)
|
||||
if subprocess.call("waitlock %d \"%s\"" % (timeout, lockfile), shell=True) != 0:
|
||||
print("timeout exceeded")
|
||||
subprocess.call("kill -9 %d" % pid, shell=True)
|
||||
|
||||
# Remove pid file
|
||||
try:
|
||||
os.remove(lockfile)
|
||||
except:
|
||||
pass
|
||||
|
||||
return True
|
||||
|
||||
def processTemplate(self, templateFile, paths, params, printError=False):
|
||||
self.last_template_file = None
|
||||
|
||||
for tp in paths:
|
||||
if os.path.exists(os.path.join(tp, templateFile)):
|
||||
break
|
||||
|
||||
else:
|
||||
if printError:
|
||||
print("Error: template %s not found" % templateFile)
|
||||
return ""
|
||||
|
||||
filename = os.path.join(tp, templateFile)
|
||||
self.last_template_file = filename
|
||||
|
||||
try:
|
||||
t = Template(open(filename).read())
|
||||
except:
|
||||
if printError:
|
||||
print("Error: template %s not readable" % filename)
|
||||
return ""
|
||||
|
||||
params['date'] = time.ctime()
|
||||
params['template'] = filename
|
||||
|
||||
while True:
|
||||
try:
|
||||
return t.substitute(params)
|
||||
|
||||
except KeyError as e:
|
||||
print("warning: $%s is not defined in %s" % (e.args[0], filename))
|
||||
params[e.args[0]] = ""
|
||||
|
||||
except ValueError as e:
|
||||
raise ValueError("%s: %s" % (filename, str(e)))
|
||||
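# Example call (illustrative, template file name hypothetical): undefined
# placeholders are warned about once and then replaced by an empty string:
#
#   env.processTemplate("my_module.tpl", [env.etc_defaults_dir],
#                       {"module.name": "fdsnws"}, printError=True)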
|
||||
def logStatus(self, name, isRunning, shouldRun, isEnabled):
|
||||
if not self._csv:
|
||||
sys.stdout.write("%-20s is " % name)
|
||||
if not isRunning:
|
||||
sys.stdout.write("not ")
|
||||
sys.stdout.write("running")
|
||||
if not isRunning and shouldRun:
|
||||
sys.stdout.write(" [WARNING]")
|
||||
sys.stdout.write("\n")
|
||||
else:
|
||||
sys.stdout.write("%s;%d;%d;%d\n" % (
|
||||
name, int(isRunning), int(shouldRun), int(isEnabled)))
|
||||
sys.stdout.flush()
|
||||
|
||||
def log(self, line):
|
||||
sys.stdout.write(line + "\n")
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
# The module interface which implements the basic default operations.
|
||||
# Each script can define its own handlers to customize the behaviour.
|
||||
# Available handlers:
|
||||
# start()
|
||||
# stop()
|
||||
# check()
|
||||
# status(shouldRun)
|
||||
# setup(params = dict{name, values as []})
|
||||
# updateConfig()
|
||||
class Module:
|
||||
def __init__(self, env, name):
|
||||
self.env = env
|
||||
self.name = name
|
||||
# The start order
|
||||
self.order = 100
|
||||
# Defines if this is a kernel module or not.
|
||||
# Kernel modules are always started
|
||||
self.isKernelModule = False
|
||||
# Defines if this is a config only module
|
||||
self.isConfigModule = False
|
||||
# Set default timeout when stopping a module to 10 seconds before killing it
|
||||
self.killTimeout = 10
|
||||
# Set default timeout when reloading a module to 10 seconds
|
||||
self.reloadTimeout = 10
|
||||
|
||||
def _get_start_params(self):
|
||||
# Run as daemon
|
||||
params = "-D"
|
||||
|
||||
# Enable syslog if configured
|
||||
if self.env.syslog:
|
||||
params = params + "s"
|
||||
|
||||
params = params + " -l " + self.env.lockFile(self.name)
|
||||
return params
|
||||
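# Example (illustrative): for a module named "fdsnws" with syslog enabled
# this yields "-Ds -l <SEISCOMP_ROOT>/var/run/fdsnws.pid".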
|
||||
def _run(self):
|
||||
return self.env.start(self.name, self.env.binaryFile(self.name), self._get_start_params())
|
||||
|
||||
def isRunning(self):
|
||||
return not self.env.tryLock(self.name)
|
||||
|
||||
def start(self):
|
||||
if self.isRunning():
|
||||
self.env.log("%s is already running" % self.name)
|
||||
return 1
|
||||
|
||||
self.env.log("starting %s" % self.name)
|
||||
return self._run()
|
||||
|
||||
def stop(self):
|
||||
if not self.isRunning():
|
||||
self.env.log("%s is not running" % self.name)
|
||||
return 1
|
||||
|
||||
self.env.log("shutting down %s" % self.name)
|
||||
# Default timeout to 10 seconds
|
||||
return self.env.stop(self.name, self.killTimeout)
|
||||
|
||||
def reload(self):
|
||||
self.env.log("reload not supported by %s" % self.name)
|
||||
return 1
|
||||
|
||||
# Check is the same as start. Whether a module should be checked
|
||||
# is decided by the control script, which checks the existence
|
||||
# of a corresponding run file.
|
||||
def check(self):
|
||||
return self.start()
|
||||
|
||||
def status(self, shouldRun):
|
||||
self.env.logStatus(self.name, self.isRunning(), shouldRun, self.env.isModuleEnabled(
|
||||
self.name) or isinstance(self, CoreModule))
|
||||
|
||||
def requiresKernelModules(self):
|
||||
# The default handler triggers a start of kernel modules before updating
|
||||
# its configuration
|
||||
return True
|
||||
|
||||
def updateConfigProxy(self):
|
||||
# This function must return either a string containing the module name
|
||||
# of the proxy module that should be configured as well or None.
|
||||
return None
|
||||
|
||||
def updateConfig(self):
|
||||
# This function must return a number indicating the error code where
|
||||
# 0 means no error. The default handler doesn't do anything.
|
||||
return 0
|
||||
|
||||
def printCrontab(self):
|
||||
# The default handler doesn't do anything
|
||||
return 0
|
||||
|
||||
def supportsAliases(self):
|
||||
# The default handler does not support aliases
|
||||
return False
|
||||
|
||||
|
||||
# Define a kernel core module which is started always
|
||||
class CoreModule(Module):
|
||||
def __init__(self, env, name):
|
||||
Module.__init__(self, env, name)
|
||||
0
lib/python/seiscomp/legacy/__init__.py
Normal file
6
lib/python/seiscomp/legacy/db/__init__.py
Normal file
@ -0,0 +1,6 @@
|
||||
from __future__ import (absolute_import, division, print_function,
|
||||
unicode_literals)
|
||||
|
||||
class DBError(Exception):
|
||||
pass
|
||||
|
||||
0
lib/python/seiscomp/legacy/db/seiscomp3/__init__.py
Normal file
1149
lib/python/seiscomp/legacy/db/seiscomp3/inventory.py
Normal file
File diff suppressed because it is too large
6267
lib/python/seiscomp/legacy/db/seiscomp3/sc3wrap.py
Normal file
File diff suppressed because it is too large
0
lib/python/seiscomp/legacy/db/xmlio/__init__.py
Normal file
1037
lib/python/seiscomp/legacy/db/xmlio/inventory.py
Normal file
File diff suppressed because it is too large
4164
lib/python/seiscomp/legacy/db/xmlio/xmlwrap.py
Normal file
File diff suppressed because it is too large
3006
lib/python/seiscomp/legacy/fseed.py
Normal file
File diff suppressed because it is too large
221
lib/python/seiscomp/logging.py
Normal file
@ -0,0 +1,221 @@
|
||||
# This file was automatically generated by SWIG (http://www.swig.org).
|
||||
# Version 4.0.2
|
||||
#
|
||||
# Do not make changes to this file unless you know what you are doing--modify
|
||||
# the SWIG interface file instead.
|
||||
|
||||
from sys import version_info as _swig_python_version_info
|
||||
if _swig_python_version_info < (2, 7, 0):
|
||||
raise RuntimeError("Python 2.7 or later required")
|
||||
|
||||
# Import the low-level C/C++ module
|
||||
if __package__ or "." in __name__:
|
||||
from . import _logging
|
||||
else:
|
||||
import _logging
|
||||
|
||||
try:
|
||||
import builtins as __builtin__
|
||||
except ImportError:
|
||||
import __builtin__
|
||||
|
||||
def _swig_repr(self):
|
||||
try:
|
||||
strthis = "proxy of " + self.this.__repr__()
|
||||
except __builtin__.Exception:
|
||||
strthis = ""
|
||||
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
|
||||
|
||||
|
||||
def _swig_setattr_nondynamic_instance_variable(set):
|
||||
def set_instance_attr(self, name, value):
|
||||
if name == "thisown":
|
||||
self.this.own(value)
|
||||
elif name == "this":
|
||||
set(self, name, value)
|
||||
elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
|
||||
set(self, name, value)
|
||||
else:
|
||||
raise AttributeError("You cannot add instance attributes to %s" % self)
|
||||
return set_instance_attr
|
||||
|
||||
|
||||
def _swig_setattr_nondynamic_class_variable(set):
|
||||
def set_class_attr(cls, name, value):
|
||||
if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
|
||||
set(cls, name, value)
|
||||
else:
|
||||
raise AttributeError("You cannot add class attributes to %s" % cls)
|
||||
return set_class_attr
|
||||
|
||||
|
||||
def _swig_add_metaclass(metaclass):
|
||||
"""Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
|
||||
def wrapper(cls):
|
||||
return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
|
||||
return wrapper
|
||||
|
||||
|
||||
class _SwigNonDynamicMeta(type):
|
||||
"""Meta class to enforce nondynamic attributes (no new attributes) for a class"""
|
||||
__setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
|
||||
|
||||
|
||||
SEISCOMP_COMPONENT = _logging.SEISCOMP_COMPONENT
|
||||
SEISCOMP_LOG_API_VERSION = _logging.SEISCOMP_LOG_API_VERSION
|
||||
LL_UNDEFINED = _logging.LL_UNDEFINED
|
||||
LL_CRITICAL = _logging.LL_CRITICAL
|
||||
LL_ERROR = _logging.LL_ERROR
|
||||
LL_WARNING = _logging.LL_WARNING
|
||||
LL_NOTICE = _logging.LL_NOTICE
|
||||
LL_INFO = _logging.LL_INFO
|
||||
LL_DEBUG = _logging.LL_DEBUG
|
||||
LL_QUANTITY = _logging.LL_QUANTITY
|
||||
SEISCOMP_LOGGING_CURRENT_FUNCTION = _logging.SEISCOMP_LOGGING_CURRENT_FUNCTION
|
||||
|
||||
def debug(*args):
|
||||
return _logging.debug(*args)
|
||||
|
||||
def info(*args):
|
||||
return _logging.info(*args)
|
||||
|
||||
def warning(*args):
|
||||
return _logging.warning(*args)
|
||||
|
||||
def error(*args):
|
||||
return _logging.error(*args)
|
||||
|
||||
def notice(*args):
|
||||
return _logging.notice(*args)
|
||||
|
||||
def log(*args):
|
||||
return _logging.log(*args)
|
||||
|
||||
def getAll():
|
||||
return _logging.getAll()
|
||||
|
||||
def getGlobalChannel(*args):
|
||||
return _logging.getGlobalChannel(*args)
|
||||
|
||||
def getComponentChannel(*args):
|
||||
return _logging.getComponentChannel(*args)
|
||||
|
||||
def getComponentAll(component):
|
||||
return _logging.getComponentAll(component)
|
||||
|
||||
def getComponentDebugs(component):
|
||||
return _logging.getComponentDebugs(component)
|
||||
|
||||
def getComponentInfos(component):
|
||||
return _logging.getComponentInfos(component)
|
||||
|
||||
def getComponentWarnings(component):
|
||||
return _logging.getComponentWarnings(component)
|
||||
|
||||
def getComponentErrors(component):
|
||||
return _logging.getComponentErrors(component)
|
||||
|
||||
def getComponentNotices(component):
|
||||
return _logging.getComponentNotices(component)
|
||||
|
||||
def consoleOutput():
|
||||
return _logging.consoleOutput()
|
||||
|
||||
def enableConsoleLogging(arg1):
|
||||
return _logging.enableConsoleLogging(arg1)
|
||||
|
||||
def disableConsoleLogging():
|
||||
return _logging.disableConsoleLogging()
|
||||
class Output(object):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
raise AttributeError("No constructor defined - class is abstract")
|
||||
__repr__ = _swig_repr
|
||||
__swig_destroy__ = _logging.delete_Output
|
||||
|
||||
def subscribe(self, channel):
|
||||
return _logging.Output_subscribe(self, channel)
|
||||
|
||||
def unsubscribe(self, channel):
|
||||
return _logging.Output_unsubscribe(self, channel)
|
||||
|
||||
def logComponent(self, e):
|
||||
return _logging.Output_logComponent(self, e)
|
||||
|
||||
def logContext(self, e):
|
||||
return _logging.Output_logContext(self, e)
|
||||
|
||||
def setUTCEnabled(self, e):
|
||||
return _logging.Output_setUTCEnabled(self, e)
|
||||
|
||||
# Register Output in _logging:
|
||||
_logging.Output_swigregister(Output)
|
||||
|
||||
class FdOutput(Output):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def __init__(self, fdOut=2):
|
||||
_logging.FdOutput_swiginit(self, _logging.new_FdOutput(fdOut))
|
||||
__swig_destroy__ = _logging.delete_FdOutput
|
||||
|
||||
# Register FdOutput in _logging:
|
||||
_logging.FdOutput_swigregister(FdOutput)
|
||||
|
||||
class FileOutput(Output):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def __init__(self, *args):
|
||||
_logging.FileOutput_swiginit(self, _logging.new_FileOutput(*args))
|
||||
__swig_destroy__ = _logging.delete_FileOutput
|
||||
|
||||
def open(self, filename):
|
||||
return _logging.FileOutput_open(self, filename)
|
||||
|
||||
def isOpen(self):
|
||||
return _logging.FileOutput_isOpen(self)
|
||||
|
||||
# Register FileOutput in _logging:
|
||||
_logging.FileOutput_swigregister(FileOutput)
|
||||
|
||||
class FileRotatorOutput(FileOutput):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def __init__(self, *args):
|
||||
_logging.FileRotatorOutput_swiginit(self, _logging.new_FileRotatorOutput(*args))
|
||||
|
||||
def open(self, filename):
|
||||
return _logging.FileRotatorOutput_open(self, filename)
|
||||
__swig_destroy__ = _logging.delete_FileRotatorOutput
|
||||
|
||||
# Register FileRotatorOutput in _logging:
|
||||
_logging.FileRotatorOutput_swigregister(FileRotatorOutput)
|
||||
|
||||
class SyslogOutput(Output):
|
||||
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
|
||||
__repr__ = _swig_repr
|
||||
|
||||
def __init__(self, *args):
|
||||
_logging.SyslogOutput_swiginit(self, _logging.new_SyslogOutput(*args))
|
||||
__swig_destroy__ = _logging.delete_SyslogOutput
|
||||
|
||||
def facility(self):
|
||||
return _logging.SyslogOutput_facility(self)
|
||||
|
||||
def open(self, ident, facility=None):
|
||||
return _logging.SyslogOutput_open(self, ident, facility)
|
||||
|
||||
def isOpen(self):
|
||||
return _logging.SyslogOutput_isOpen(self)
|
||||
|
||||
def close(self):
|
||||
return _logging.SyslogOutput_close(self)
|
||||
|
||||
# Register SyslogOutput in _logging:
|
||||
_logging.SyslogOutput_swigregister(SyslogOutput)
|
||||
|
||||
|
||||
|
||||
54
lib/python/seiscomp/logs.py
Normal file
@ -0,0 +1,54 @@
|
||||
#*****************************************************************************
|
||||
# logs.py
|
||||
#
|
||||
# SeisComP log handlers
|
||||
#
|
||||
# (c) 2005 Andres Heinloo, GFZ Potsdam
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the
|
||||
# Free Software Foundation; either version 2, or (at your option) any later
|
||||
# version. For more information, see http://www.gnu.org/
|
||||
#*****************************************************************************
|
||||
|
||||
from __future__ import (absolute_import, division, print_function,
|
||||
unicode_literals)
|
||||
|
||||
import sys as _sys
|
||||
import traceback as _traceback
|
||||
|
||||
class _Logf(object):
|
||||
def write(self, s):
|
||||
error(s.rstrip())
|
||||
|
||||
def print_exc():
|
||||
_traceback.print_exc(file=_Logf())
|
||||
|
||||
# Default handlers, to be overridden by packages, e.g.:
|
||||
#
|
||||
# def log_info(s):
|
||||
# print(time.ctime() + " - trigger: " + s)
|
||||
# sys.stdout.flush()
|
||||
#
|
||||
# seiscomp.logs.info = log_info
|
||||
|
||||
def debug(s):
|
||||
_sys.stdout.write(s + "\n")
|
||||
_sys.stdout.flush()
|
||||
|
||||
def info(s):
|
||||
_sys.stdout.write(s + "\n")
|
||||
_sys.stdout.flush()
|
||||
|
||||
def notice(s):
|
||||
_sys.stdout.write(s + "\n")
|
||||
_sys.stdout.flush()
|
||||
|
||||
def warning(s):
|
||||
_sys.stdout.write(s + "\n")
|
||||
_sys.stdout.flush()
|
||||
|
||||
def error(s):
|
||||
_sys.stdout.write(s + "\n")
|
||||
_sys.stdout.flush()
|
||||
|
||||
3139
lib/python/seiscomp/math.py
Normal file
File diff suppressed because it is too large
491
lib/python/seiscomp/mseedlite.py
Normal file
@ -0,0 +1,491 @@
|
||||
"""Python-only Mini-SEED module with limited functionality.
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
any later version.
|
||||
|
||||
:Copyright:
|
||||
2005 Andres Heinloo, GEOFON, GFZ Potsdam <geofon@gfz-potsdam.de>
|
||||
:License:
|
||||
GPLv3
|
||||
:Platform:
|
||||
Linux
|
||||
|
||||
.. moduleauthor:: Andres Heinloo <andres@gfz-potsdam.de>, GEOFON, GFZ Potsdam
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import datetime
|
||||
import struct
|
||||
import sys
|
||||
from io import BytesIO
|
||||
|
||||
_FIXHEAD_LEN = 48
|
||||
_BLKHEAD_LEN = 4
|
||||
_BLK1000_LEN = 4
|
||||
_BLK1001_LEN = 4
|
||||
_MAX_RECLEN = 4096
|
||||
|
||||
_doy = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365)
|
||||
|
||||
|
||||
def _is_leap(y):
|
||||
"""True if y is a leap year."""
|
||||
return (y % 400 == 0) or (y % 4 == 0 and y % 100 != 0)
|
||||
|
||||
|
||||
def _ldoy(y, m):
|
||||
"""The day of the year of the first day of month m, in year y.
|
||||
|
||||
Note: for January, m=1; for December, m=12.
|
||||
Examples:
|
||||
_ldoy(1900, 4) = 90
|
||||
_ldoy(1900, 1) = 0
|
||||
_ldoy(1999, 4) = 90
|
||||
_ldoy(2004, 4) = 91
|
||||
_ldoy(2000, 4) = 91
|
||||
|
||||
"""
|
||||
return _doy[m - 1] + (_is_leap(y) and m >= 3)
|
||||
|
||||
|
||||
def _dy2mdy(doy, year):
|
||||
month = 1
|
||||
while doy > _ldoy(year, month + 1):
|
||||
month += 1
|
||||
|
||||
mday = doy - _ldoy(year, month)
|
||||
return (month, mday)
|
||||
|
||||
|
||||
def _mdy2dy(month, day, year):
|
||||
return _ldoy(year, month) + day
|
||||
|
||||
|
||||
class EndOfData(Exception):
|
||||
"""."""
|
||||
|
||||
|
||||
class MSeedError(Exception):
|
||||
"""."""
|
||||
|
||||
|
||||
class MSeedNoData(MSeedError):
|
||||
"""."""
|
||||
|
||||
|
||||
class Record(object):
|
||||
"""Mini-SEED record."""
|
||||
|
||||
def __init__(self, src):
|
||||
"""Create a Mini-SEED record from a file handle or a bitstream."""
|
||||
if isinstance(src, bytes):
|
||||
fd = BytesIO(src)
|
||||
elif hasattr(src, "read"):
|
||||
fd = src
|
||||
else:
|
||||
raise TypeError("argument is neither bytes nor a file object")
|
||||
|
||||
# self.header = ""
|
||||
self.header = bytes()
|
||||
fixhead = fd.read(_FIXHEAD_LEN)
|
||||
|
||||
if len(fixhead) == 0:
|
||||
# FIXME Check if there is no better option, but NOT StopIteration!
|
||||
raise EndOfData
|
||||
|
||||
if len(fixhead) < _FIXHEAD_LEN:
|
||||
raise MSeedError("unexpected end of header")
|
||||
|
||||
(
|
||||
recno_str,
|
||||
self.rectype,
|
||||
sta,
|
||||
loc,
|
||||
cha,
|
||||
net,
|
||||
bt_year,
|
||||
bt_doy,
|
||||
bt_hour,
|
||||
bt_minute,
|
||||
bt_second,
|
||||
bt_tms,
|
||||
self.nsamp,
|
||||
self.sr_factor,
|
||||
self.sr_mult,
|
||||
self.aflgs,
|
||||
self.cflgs,
|
||||
self.qflgs,
|
||||
self.__num_blk,
|
||||
self.time_correction,
|
||||
self.__pdata,
|
||||
self.__pblk,
|
||||
) = struct.unpack(">6scx5s2s3s2s2H3Bx2H2h4Bl2H", fixhead)
|
||||
|
||||
if sys.version_info[0] > 2:
|
||||
recno_str = recno_str.decode("utf-8")
|
||||
self.rectype = self.rectype.decode("utf-8")
|
||||
sta = sta.decode("utf-8")
|
||||
loc = loc.decode("utf-8")
|
||||
cha = cha.decode("utf-8")
|
||||
net = net.decode("utf-8")
|
||||
|
||||
self.header += fixhead
|
||||
|
||||
if self.rectype not in ("D", "R", "Q", "M"):
|
||||
fd.read(_MAX_RECLEN - _FIXHEAD_LEN)
|
||||
raise MSeedNoData("non-data record")
|
||||
|
||||
if self.__pdata >= _MAX_RECLEN:
|
||||
raise MSeedError(
|
||||
f"invalid pointer at {net.strip()}.{sta.strip()}.{loc.strip()}.{cha.strip()}: "
|
||||
f"record size ({self.__pdata}) >= {_MAX_RECLEN}"
|
||||
)
|
||||
if self.__pdata < _FIXHEAD_LEN or (
|
||||
self.__pblk != 0
|
||||
and ((self.__pblk < _FIXHEAD_LEN) or (self.__pblk >= self.__pdata))
|
||||
):
|
||||
raise MSeedError(
|
||||
f"invalid pointer at {net.strip()}.{sta.strip()}.{loc.strip()}.{cha.strip()}"
|
||||
)
|
||||
|
||||
if self.__pblk == 0:
|
||||
blklen = 0
|
||||
else:
|
||||
blklen = self.__pdata - self.__pblk
|
||||
gaplen = self.__pblk - _FIXHEAD_LEN
|
||||
gap = fd.read(gaplen)
|
||||
if len(gap) < gaplen:
|
||||
raise MSeedError("unexpected end of data")
|
||||
|
||||
self.header += gap
|
||||
|
||||
# defaults
|
||||
self.encoding = 11
|
||||
self.byteorder = 1
|
||||
rec_len_exp = 12
|
||||
self.time_quality = -1
|
||||
micros = 0
|
||||
self.nframes = None
|
||||
self.__rec_len_exp_idx = None
|
||||
self.__micros_idx = None
|
||||
self.__nframes_idx = None
|
||||
|
||||
pos = 0
|
||||
while pos < blklen:
|
||||
blkhead = fd.read(_BLKHEAD_LEN)
|
||||
if len(blkhead) < _BLKHEAD_LEN:
|
||||
raise MSeedError(f"unexpected end of blockettes at{pos}{len(blkhead)}")
|
||||
|
||||
(blktype, nextblk) = struct.unpack(">2H", blkhead)
|
||||
self.header += blkhead
|
||||
pos += _BLKHEAD_LEN
|
||||
|
||||
if blktype == 1000:
|
||||
blk1000 = fd.read(_BLK1000_LEN)
|
||||
if len(blk1000) < _BLK1000_LEN:
|
||||
raise MSeedError(
|
||||
f"unexpected end of blockettes at {pos}{len(blk1000)}"
|
||||
)
|
||||
|
||||
(self.encoding, self.byteorder, rec_len_exp) = struct.unpack(
|
||||
">3Bx", blk1000
|
||||
)
|
||||
|
||||
self.__rec_len_exp_idx = self.__pblk + pos + 2
|
||||
self.header += blk1000
|
||||
pos += _BLK1000_LEN
|
||||
|
||||
elif blktype == 1001:
|
||||
blk1001 = fd.read(_BLK1001_LEN)
|
||||
if len(blk1001) < _BLK1001_LEN:
|
||||
raise MSeedError(
|
||||
f"unexpected end of blockettes at {pos}{len(blk1001)}"
|
||||
)
|
||||
|
||||
(self.time_quality, micros, self.nframes) = struct.unpack(
|
||||
">BbxB", blk1001
|
||||
)
|
||||
|
||||
self.__micros_idx = self.__pblk + pos + 1
|
||||
self.__nframes_idx = self.__pblk + pos + 3
|
||||
self.header += blk1001
|
||||
pos += _BLK1001_LEN
|
||||
|
||||
if nextblk == 0:
|
||||
break
|
||||
|
||||
if nextblk < self.__pblk + pos or nextblk >= self.__pdata:
|
||||
raise MSeedError("invalid pointers")
|
||||
|
||||
gaplen = nextblk - (self.__pblk + pos)
|
||||
gap = fd.read(gaplen)
|
||||
if len(gap) < gaplen:
|
||||
raise MSeedError("unexpected end of data")
|
||||
|
||||
self.header += gap
|
||||
pos += gaplen
|
||||
|
||||
        if pos > blklen:
            raise MSeedError("corrupt record")

        gaplen = self.__pdata - len(self.header)
        gap = fd.read(gaplen)
        if len(gap) < gaplen:
            raise MSeedError("unexpected end of data")

        self.header += gap
        pos += gaplen

        self.recno = int(recno_str)
        self.net = net.strip()
        self.sta = sta.strip()
        self.loc = loc.strip()
        self.cha = cha.strip()

        if (self.sr_factor > 0) and (self.sr_mult > 0):
            self.samprate_num = self.sr_factor * self.sr_mult
            self.samprate_denom = 1
        elif (self.sr_factor > 0) and (self.sr_mult < 0):
            self.samprate_num = self.sr_factor
            self.samprate_denom = -self.sr_mult
        elif (self.sr_factor < 0) and (self.sr_mult > 0):
            self.samprate_num = self.sr_mult
            self.samprate_denom = -self.sr_factor
        elif (self.sr_factor < 0) and (self.sr_mult < 0):
            self.samprate_num = 1
            self.samprate_denom = self.sr_factor * self.sr_mult
        else:
            self.samprate_num = 0
            self.samprate_denom = 1

        self.fsamp = float(self.samprate_num) / float(self.samprate_denom)

        # quick fix to avoid exception from datetime
        if bt_second > 59:
            self.leap = bt_second - 59
            bt_second = 59
        else:
            self.leap = 0

        try:
            (month, day) = _dy2mdy(bt_doy, bt_year)
            self.begin_time = datetime.datetime(
                bt_year, month, day, bt_hour, bt_minute, bt_second
            )

            self.begin_time += datetime.timedelta(microseconds=bt_tms * 100 + micros)

            if (self.nsamp != 0) and (self.fsamp != 0):
                msAux = 1000000 * self.nsamp / self.fsamp
                self.end_time = self.begin_time + datetime.timedelta(microseconds=msAux)
            else:
                self.end_time = self.begin_time

        except ValueError as e:
            raise MSeedError(f"invalid time: {str(e)}")

        self.size = 1 << rec_len_exp
        if (self.size < len(self.header)) or (self.size > _MAX_RECLEN):
            raise MSeedError("invalid record size")

        datalen = self.size - self.__pdata
        self.data = fd.read(datalen)
        if len(self.data) < datalen:
            raise MSeedError("unexpected end of data")

        if len(self.header) + len(self.data) != self.size:
            raise MSeedError("internal error")

        (self.X0, self.Xn) = struct.unpack(">ll", self.data[4:12])

        (w0,) = struct.unpack(">L", self.data[:4])
        (w3,) = struct.unpack(">L", self.data[12:16])
        c3 = (w0 >> 24) & 0x3
        d0 = None

        if self.encoding == 10:
            # STEIM (1) compression
            if c3 == 1:
                d0 = (w3 >> 24) & 0xFF
                if d0 > 0x7F:
                    d0 -= 0x100
            elif c3 == 2:
                d0 = (w3 >> 16) & 0xFFFF
                if d0 > 0x7FFF:
                    d0 -= 0x10000
            elif c3 == 3:
                d0 = w3 & 0xFFFFFFFF
                if d0 > 0x7FFFFFFF:
                    d0 -= 0xFFFFFFFF
                    d0 -= 1

        elif self.encoding == 11:
            # STEIM (2) compression
            if c3 == 1:
                d0 = (w3 >> 24) & 0xFF
                if d0 > 0x7F:
                    d0 -= 0x100
            elif c3 == 2:
                dnib = (w3 >> 30) & 0x3
                if dnib == 1:
                    d0 = w3 & 0x3FFFFFFF
                    if d0 > 0x1FFFFFFF:
                        d0 -= 0x40000000
                elif dnib == 2:
                    d0 = (w3 >> 15) & 0x7FFF
                    if d0 > 0x3FFF:
                        d0 -= 0x8000
                elif dnib == 3:
                    d0 = (w3 >> 20) & 0x3FF
                    if d0 > 0x1FF:
                        d0 -= 0x400
            elif c3 == 3:
                dnib = (w3 >> 30) & 0x3
                if dnib == 0:
                    d0 = (w3 >> 24) & 0x3F
                    if d0 > 0x1F:
                        d0 -= 0x40
                elif dnib == 1:
                    d0 = (w3 >> 25) & 0x1F
                    if d0 > 0xF:
                        d0 -= 0x20
                elif dnib == 2:
                    d0 = (w3 >> 24) & 0xF
                    if d0 > 0x7:
                        d0 -= 0x10

        if d0 is not None:
            self.X_minus1 = self.X0 - d0
        else:
            self.X_minus1 = None

        if (self.nframes is None) or (self.nframes == 0):
            i = 0
            self.nframes = 0
            while i < len(self.data):
                # compare as a 1-byte slice so this works for both Python 2
                # str and Python 3 bytes (indexing bytes yields an int)
                if self.data[i:i + 1] == b"\0":
                    break

                i += 64
                self.nframes += 1

    def merge(self, rec):
        """Caller is expected to check for contiguity of data.

        Check if rec.nframes * 64 <= len(data)?
        """
        (self.Xn,) = struct.unpack(">l", rec.data[8:12])
        self.data += rec.data[: rec.nframes * 64]
        self.nframes += rec.nframes
        self.nsamp += rec.nsamp
        self.size = len(self.header) + len(self.data)
        self.end_time = rec.end_time

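    # A minimal sketch of the contiguity check merge() expects its caller to
    # perform (illustration only; "prev" and "rec" are hypothetical records):
    #
    #     tol = datetime.timedelta(microseconds=500000.0 / prev.fsamp)
    #     if abs(rec.begin_time - prev.end_time) <= tol:
    #         prev.merge(rec)
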
    def write(self, fd, rec_len_exp):
        """Write the record to an already opened file."""
        if self.size > (1 << rec_len_exp):
            raise MSeedError(
                f"record is larger than requested write size: {self.size} > {1 << rec_len_exp}"
            )

        recno_str = bytes(("%06d" % (self.recno,)).encode("utf-8"))
        sta = bytes(("%-5.5s" % (self.sta,)).encode("utf-8"))
        loc = bytes(("%-2.2s" % (self.loc,)).encode("utf-8"))
        cha = bytes(("%-3.3s" % (self.cha,)).encode("utf-8"))
        net = bytes(("%-2.2s" % (self.net,)).encode("utf-8"))
        bt_year = self.begin_time.year
        bt_doy = _mdy2dy(
            self.begin_time.month, self.begin_time.day, self.begin_time.year
        )
        bt_hour = self.begin_time.hour
        bt_minute = self.begin_time.minute
        bt_second = self.begin_time.second + self.leap
        bt_tms = self.begin_time.microsecond // 100
        micros = self.begin_time.microsecond % 100

        # This is just to make it Python 2 AND 3 compatible (str vs. bytes)
        rectype = (
            self.rectype.encode("utf-8") if sys.version_info[0] > 2 else self.rectype
        )

        buf = struct.pack(
            ">6s2c5s2s3s2s2H3Bx2H2h4Bl2H",
            recno_str,
            rectype,
            b" ",
            sta,
            loc,
            cha,
            net,
            bt_year,
            bt_doy,
            bt_hour,
            bt_minute,
            bt_second,
            bt_tms,
            self.nsamp,
            self.sr_factor,
            self.sr_mult,
            self.aflgs,
            self.cflgs,
            self.qflgs,
            self.__num_blk,
            self.time_correction,
            self.__pdata,
            self.__pblk,
        )
        fd.write(buf)

        buf = list(self.header[_FIXHEAD_LEN:])

        if self.__rec_len_exp_idx is not None:
            buf[self.__rec_len_exp_idx - _FIXHEAD_LEN] = struct.pack(">B", rec_len_exp)

        if self.__micros_idx is not None:
            buf[self.__micros_idx - _FIXHEAD_LEN] = struct.pack(">b", micros)

        if self.__nframes_idx is not None:
            buf[self.__nframes_idx - _FIXHEAD_LEN] = struct.pack(">B", self.nframes)

        ba = bytearray()
        for b in buf:
            try:
                # Python 3: items of list(bytes) are already ints
                ba.append(b)
            except Exception:
                # the replaced blockette fields are 1-byte packed bytes objects
                ba.append(int.from_bytes(b, byteorder="big"))
        fd.write(ba)

        buf = (
            self.data[:4]
            + struct.pack(">ll", self.X0, self.Xn)
            + self.data[12:]
            + ((1 << rec_len_exp) - self.size) * b"\0"
        )

        fd.write(buf)


class Input(object):
    """Iterate over the available Mini-SEED records."""

    def __init__(self, fd):
        """Create the iterable from the file handle passed as parameter."""
        self.__fd = fd

    def __iter__(self):
        """Define the iterator."""
        while True:
            try:
                yield Record(self.__fd)

            except EndOfData:
                # Following PEP 479, a generator must return instead of
                # raising StopIteration to signal exhaustion
                # raise StopIteration
                return

            except MSeedNoData:
                pass
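
# A minimal usage sketch for the Input iterator above (illustration only;
# "input.mseed" is an assumed file name, not taken from this module):
#
#     with open("input.mseed", "rb") as fd:
#         for rec in Input(fd):
#             print(rec.net, rec.sta, rec.loc, rec.cha, rec.begin_time)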
187
lib/python/seiscomp/myconfig.py
Normal file
187
lib/python/seiscomp/myconfig.py
Normal file
@ -0,0 +1,187 @@
from __future__ import print_function

import sys

import xml.dom.minidom

if sys.version_info[0] >= 3:
    from configparser import RawConfigParser
else:
    from ConfigParser import RawConfigParser

def readConfig(fileName):
    cp = RawConfigParser()
    fp = open(fileName, 'r')
    if sys.version_info < (3, 2):
        cp.readfp(fp)  # pylint: disable=W1505
    else:
        cp.read_file(fp, fileName)
    return cp


def parseXMLnode(root):
    """
    Parses an XML tree starting from the root node and returns a list of
    tuples containing name, attributes and content of all child nodes.
    """

    nodes = []

    if root.hasChildNodes():
        for node in [node for node in root.childNodes
                     if node.nodeType == node.ELEMENT_NODE]:

            ncn = len(node.childNodes)
            if ncn == 0:
                content = None
            elif ncn == 1 and node.firstChild.nodeValue:
                content = node.firstChild.nodeValue.strip()
            else:
                content = parseXMLnode(node)

            attrs = {}
            if node.hasAttributes():
                for i in range(node.attributes.length):
                    attr = node.attributes.item(i)
                    name = attr.nodeName
                    attrs[name] = attr.nodeValue.strip()

            nodes.append((node.nodeName, attrs, content))

    return nodes

def parseXMLfile(f):
    root = xml.dom.minidom.parse(f)
    x = parseXMLnode(root)
    if len(x) == 1:
        return x[0]
    # else not exactly one root element, but that's caught by xml.dom.minidom.parse()

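# Illustration only (not part of the original module): for an input document
#
#     <config><section name="main"><int name="port">8080</int></section></config>
#
# parseXMLfile() returns the tuple
#
#     ('config', {}, [('section', {'name': 'main'},
#                      [('int', {'name': 'port'}, '8080')])])
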
class MyConfig(dict):

    def __init__(self, filename):

        if filename[-4:].lower() == ".ini":
            self.readINI(filename)
        elif filename[-4:].lower() == ".xml":
            self.readXML(filename)
        else:
            print("unsupported config file type:", filename)

    def readINI(self, filename):
        config = readConfig(filename)

        for sec in config.sections():
            d = self[sec] = {}
            for opt in config.options(sec):
                d[opt] = config.get(sec, opt)

    def readXML(self, filename):
        # XXX XML support is only provided for testing.
        name, attrs, content = parseXMLfile(filename)
        assert name == "config"
        for name, attrs, content in content:
            assert "name" in attrs
            sec = attrs["name"]
            assert name == "section"
            d = self[sec] = {}

            for name, attrs, content in content:
                if isinstance(content, list):
                    raise TypeError("<%s> elements can't have children" % name)

                if name == "string":
                    tmp = str(content)
                elif name == "int":
                    tmp = int(content)
                elif name == "float":
                    tmp = float(content)
                else:
                    raise NameError("illegal tag '%s'" % name)

                if not "name" in attrs:
                    raise NameError("missing 'name' attribute in <%s>" % name)
                opt = attrs["name"]
                d[opt] = tmp


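# Example input for MyConfig (illustration only; the file name and keys are
# hypothetical): an INI file
#
#     [polling]
#     interval = 10
#
# read via MyConfig("example.ini") yields {'polling': {'interval': '10'}}.
# Note that INI values stay strings, whereas the XML reader casts the
# <int> and <float> tags.
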
class ConfigINI(dict):

    def __init__(self, filename, mandatory=None):
        self.read(filename)
        if not isinstance(mandatory, list):
            mandatory = []
        self.mandatory = mandatory

    def read(self, filename):
        config = readConfig(filename)

        for sec in config.sections():
            d = self[sec] = {}
            for opt in config.options(sec):
                d[opt] = config.get(sec, opt)

    def fillDefault(self, defaultSection="default"):
        default = self[defaultSection]

        for section in self:
            if section == defaultSection:
                continue

            # for missing items, use the default
            for item in default:
                if item not in self[section]:
                    self[section][item] = default[item]

            # # check for items that don't appear in the default
            # for item in self[section]:
            #     if item not in default and item not in self.mandatory:
            #         msg("[%s]: unmatched item '%s'" % (section, item))

            for item in self.mandatory:
                if item not in self[section]:
                    print("[%s]: missing item '%s'" % (section, item),
                          file=sys.stderr)
                    # XXX this could also be treated as a fatal error


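# Sketch of fillDefault() (illustration only, hypothetical data): given the
# sections {'default': {'timeout': '5'}, 'station1': {}}, calling
# fillDefault() copies timeout='5' into 'station1' because it is missing
# there; any item listed in self.mandatory that is still absent afterwards
# is reported.
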
class ConfigXML(MyConfig):

    def __init__(self, filename):
        self.read(filename)

    def read(self, filename):
        # XXX XML support is only provided for testing.
        name, attrs, content = parseXMLfile(filename)
        assert name == "config"
        for name, attrs, content in content:
            assert "name" in attrs
            sec = attrs["name"]
            assert name == "section"
            d = self[sec] = {}

            for name, attrs, content in content:
                if isinstance(content, list):
                    raise TypeError("<%s> elements can't have children" % name)

                if name == "string":
                    tmp = str(content)
                elif name == "int":
                    tmp = int(content)
                elif name == "float":
                    tmp = float(content)
                else:
                    raise NameError("illegal tag '%s'" % name)

                if not "name" in attrs:
                    raise NameError("missing 'name' attribute in <%s>" % name)
                opt = attrs["name"]
                d[opt] = tmp


if __name__ == '__main__':
    for f in "test.ini", "test.xml":
        print("#### filename=", f)
        config = MyConfig(f)
        print(config)
        for section in config:
            print(section, config[section])
1592
lib/python/seiscomp/scbulletin.py
Normal file
1592
lib/python/seiscomp/scbulletin.py
Normal file
File diff suppressed because it is too large
Load Diff
899
lib/python/seiscomp/seismology.py
Normal file
899
lib/python/seiscomp/seismology.py
Normal file
@ -0,0 +1,899 @@
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 4.0.2
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.

"""Codes for various seismological computations"""

from sys import version_info as _swig_python_version_info
if _swig_python_version_info < (2, 7, 0):
    raise RuntimeError("Python 2.7 or later required")

# Import the low-level C/C++ module
if __package__ or "." in __name__:
    from . import _seismology
else:
    import _seismology

try:
    import builtins as __builtin__
except ImportError:
    import __builtin__

def _swig_repr(self):
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)


def _swig_setattr_nondynamic_instance_variable(set):
    def set_instance_attr(self, name, value):
        if name == "thisown":
            self.this.own(value)
        elif name == "this":
            set(self, name, value)
        elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
            set(self, name, value)
        else:
            raise AttributeError("You cannot add instance attributes to %s" % self)
    return set_instance_attr


def _swig_setattr_nondynamic_class_variable(set):
    def set_class_attr(cls, name, value):
        if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
            set(cls, name, value)
        else:
            raise AttributeError("You cannot add class attributes to %s" % cls)
    return set_class_attr


def _swig_add_metaclass(metaclass):
    """Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
    def wrapper(cls):
        return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
    return wrapper


class _SwigNonDynamicMeta(type):
    """Meta class to enforce nondynamic attributes (no new attributes) for a class"""
    __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)


import weakref

class SwigPyIterator(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _seismology.delete_SwigPyIterator

    def value(self):
        return _seismology.SwigPyIterator_value(self)

    def incr(self, n=1):
        return _seismology.SwigPyIterator_incr(self, n)

    def decr(self, n=1):
        return _seismology.SwigPyIterator_decr(self, n)

    def distance(self, x):
        return _seismology.SwigPyIterator_distance(self, x)

    def equal(self, x):
        return _seismology.SwigPyIterator_equal(self, x)

    def copy(self):
        return _seismology.SwigPyIterator_copy(self)

    def next(self):
        return _seismology.SwigPyIterator_next(self)

    def __next__(self):
        return _seismology.SwigPyIterator___next__(self)

    def previous(self):
        return _seismology.SwigPyIterator_previous(self)

    def advance(self, n):
        return _seismology.SwigPyIterator_advance(self, n)

    def __eq__(self, x):
        return _seismology.SwigPyIterator___eq__(self, x)

    def __ne__(self, x):
        return _seismology.SwigPyIterator___ne__(self, x)

    def __iadd__(self, n):
        return _seismology.SwigPyIterator___iadd__(self, n)

    def __isub__(self, n):
        return _seismology.SwigPyIterator___isub__(self, n)

    def __add__(self, n):
        return _seismology.SwigPyIterator___add__(self, n)

    def __sub__(self, *args):
        return _seismology.SwigPyIterator___sub__(self, *args)
    def __iter__(self):
        return self

# Register SwigPyIterator in _seismology:
_seismology.SwigPyIterator_swigregister(SwigPyIterator)

import seiscomp.io
import seiscomp.math
import seiscomp.core
import seiscomp.datamodel
import seiscomp.geo
import seiscomp.config
class TravelTimeList_internal(object):
    r"""Proxy of C++ std::list< Seiscomp::TravelTime > class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def iterator(self):
        r"""iterator(TravelTimeList_internal self) -> SwigPyIterator"""
        return _seismology.TravelTimeList_internal_iterator(self)
    def __iter__(self):
        return self.iterator()

    def __nonzero__(self):
        r"""__nonzero__(TravelTimeList_internal self) -> bool"""
        return _seismology.TravelTimeList_internal___nonzero__(self)

    def __bool__(self):
        r"""__bool__(TravelTimeList_internal self) -> bool"""
        return _seismology.TravelTimeList_internal___bool__(self)

    def __len__(self):
        r"""__len__(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::size_type"""
        return _seismology.TravelTimeList_internal___len__(self)

    def __getslice__(self, i, j):
        r"""__getslice__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i, std::list< Seiscomp::TravelTime >::difference_type j) -> TravelTimeList_internal"""
        return _seismology.TravelTimeList_internal___getslice__(self, i, j)

    def __setslice__(self, *args):
        r"""
        __setslice__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i, std::list< Seiscomp::TravelTime >::difference_type j)
        __setslice__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i, std::list< Seiscomp::TravelTime >::difference_type j, TravelTimeList_internal v)
        """
        return _seismology.TravelTimeList_internal___setslice__(self, *args)

    def __delslice__(self, i, j):
        r"""__delslice__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i, std::list< Seiscomp::TravelTime >::difference_type j)"""
        return _seismology.TravelTimeList_internal___delslice__(self, i, j)

    def __delitem__(self, *args):
        r"""
        __delitem__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i)
        __delitem__(TravelTimeList_internal self, PySliceObject * slice)
        """
        return _seismology.TravelTimeList_internal___delitem__(self, *args)

    def __getitem__(self, *args):
        r"""
        __getitem__(TravelTimeList_internal self, PySliceObject * slice) -> TravelTimeList_internal
        __getitem__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i) -> TravelTime
        """
        return _seismology.TravelTimeList_internal___getitem__(self, *args)

    def __setitem__(self, *args):
        r"""
        __setitem__(TravelTimeList_internal self, PySliceObject * slice, TravelTimeList_internal v)
        __setitem__(TravelTimeList_internal self, PySliceObject * slice)
        __setitem__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::difference_type i, TravelTime x)
        """
        return _seismology.TravelTimeList_internal___setitem__(self, *args)

    def pop(self):
        r"""pop(TravelTimeList_internal self) -> TravelTime"""
        return _seismology.TravelTimeList_internal_pop(self)

    def append(self, x):
        r"""append(TravelTimeList_internal self, TravelTime x)"""
        return _seismology.TravelTimeList_internal_append(self, x)

    def empty(self):
        r"""empty(TravelTimeList_internal self) -> bool"""
        return _seismology.TravelTimeList_internal_empty(self)

    def size(self):
        r"""size(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::size_type"""
        return _seismology.TravelTimeList_internal_size(self)

    def swap(self, v):
        r"""swap(TravelTimeList_internal self, TravelTimeList_internal v)"""
        return _seismology.TravelTimeList_internal_swap(self, v)

    def begin(self):
        r"""begin(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::iterator"""
        return _seismology.TravelTimeList_internal_begin(self)

    def end(self):
        r"""end(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::iterator"""
        return _seismology.TravelTimeList_internal_end(self)

    def rbegin(self):
        r"""rbegin(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::reverse_iterator"""
        return _seismology.TravelTimeList_internal_rbegin(self)

    def rend(self):
        r"""rend(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::reverse_iterator"""
        return _seismology.TravelTimeList_internal_rend(self)

    def clear(self):
        r"""clear(TravelTimeList_internal self)"""
        return _seismology.TravelTimeList_internal_clear(self)

    def get_allocator(self):
        r"""get_allocator(TravelTimeList_internal self) -> std::list< Seiscomp::TravelTime >::allocator_type"""
        return _seismology.TravelTimeList_internal_get_allocator(self)

    def pop_back(self):
        r"""pop_back(TravelTimeList_internal self)"""
        return _seismology.TravelTimeList_internal_pop_back(self)

    def erase(self, *args):
        r"""
        erase(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::iterator pos) -> std::list< Seiscomp::TravelTime >::iterator
        erase(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::iterator first, std::list< Seiscomp::TravelTime >::iterator last) -> std::list< Seiscomp::TravelTime >::iterator
        """
        return _seismology.TravelTimeList_internal_erase(self, *args)

    def __init__(self, *args):
        r"""
        __init__(TravelTimeList_internal self) -> TravelTimeList_internal
        __init__(TravelTimeList_internal self, TravelTimeList_internal other) -> TravelTimeList_internal
        __init__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::size_type size) -> TravelTimeList_internal
        __init__(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::size_type size, TravelTime value) -> TravelTimeList_internal
        """
        _seismology.TravelTimeList_internal_swiginit(self, _seismology.new_TravelTimeList_internal(*args))

    def push_back(self, x):
        r"""push_back(TravelTimeList_internal self, TravelTime x)"""
        return _seismology.TravelTimeList_internal_push_back(self, x)

    def front(self):
        r"""front(TravelTimeList_internal self) -> TravelTime"""
        return _seismology.TravelTimeList_internal_front(self)

    def back(self):
        r"""back(TravelTimeList_internal self) -> TravelTime"""
        return _seismology.TravelTimeList_internal_back(self)

    def assign(self, n, x):
        r"""assign(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::size_type n, TravelTime x)"""
        return _seismology.TravelTimeList_internal_assign(self, n, x)

    def resize(self, *args):
        r"""
        resize(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::size_type new_size)
        resize(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::size_type new_size, TravelTime x)
        """
        return _seismology.TravelTimeList_internal_resize(self, *args)

    def insert(self, *args):
        r"""
        insert(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::iterator pos, TravelTime x) -> std::list< Seiscomp::TravelTime >::iterator
        insert(TravelTimeList_internal self, std::list< Seiscomp::TravelTime >::iterator pos, std::list< Seiscomp::TravelTime >::size_type n, TravelTime x)
        """
        return _seismology.TravelTimeList_internal_insert(self, *args)

    def pop_front(self):
        r"""pop_front(TravelTimeList_internal self)"""
        return _seismology.TravelTimeList_internal_pop_front(self)

    def push_front(self, x):
        r"""push_front(TravelTimeList_internal self, TravelTime x)"""
        return _seismology.TravelTimeList_internal_push_front(self, x)

    def reverse(self):
        r"""reverse(TravelTimeList_internal self)"""
        return _seismology.TravelTimeList_internal_reverse(self)
    __swig_destroy__ = _seismology.delete_TravelTimeList_internal

# Register TravelTimeList_internal in _seismology:
_seismology.TravelTimeList_internal_swigregister(TravelTimeList_internal)

class Regions(object):
    r"""Proxy of C++ Seiscomp::Regions class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    @staticmethod
    def getFlinnEngdahlRegion(lat, lon):
        r"""getFlinnEngdahlRegion(double lat, double lon) -> std::string"""
        return _seismology.Regions_getFlinnEngdahlRegion(lat, lon)

    @staticmethod
    def getFlinnEngdahlRegionsCount():
        r"""getFlinnEngdahlRegionsCount() -> int"""
        return _seismology.Regions_getFlinnEngdahlRegionsCount()

    @staticmethod
    def getFlinnEngdahlRegionById(id):
        r"""getFlinnEngdahlRegionById(int id) -> std::string"""
        return _seismology.Regions_getFlinnEngdahlRegionById(id)

    @staticmethod
    def load():
        r"""load()"""
        return _seismology.Regions_load()

    @staticmethod
    def getRegionName(lat, lon):
        r"""getRegionName(double lat, double lon) -> std::string"""
        return _seismology.Regions_getRegionName(lat, lon)

    @staticmethod
    def polyRegions():
        r"""polyRegions() -> Seiscomp::Geo::PolyRegions &"""
        return _seismology.Regions_polyRegions()
    __swig_destroy__ = _seismology.delete_Regions

# Register Regions in _seismology:
_seismology.Regions_swigregister(Regions)

def Regions_getFlinnEngdahlRegion(lat, lon):
    r"""Regions_getFlinnEngdahlRegion(double lat, double lon) -> std::string"""
    return _seismology.Regions_getFlinnEngdahlRegion(lat, lon)

def Regions_getFlinnEngdahlRegionsCount():
    r"""Regions_getFlinnEngdahlRegionsCount() -> int"""
    return _seismology.Regions_getFlinnEngdahlRegionsCount()

def Regions_getFlinnEngdahlRegionById(id):
    r"""Regions_getFlinnEngdahlRegionById(int id) -> std::string"""
    return _seismology.Regions_getFlinnEngdahlRegionById(id)

def Regions_load():
    r"""Regions_load()"""
    return _seismology.Regions_load()

def Regions_getRegionName(lat, lon):
    r"""Regions_getRegionName(double lat, double lon) -> std::string"""
    return _seismology.Regions_getRegionName(lat, lon)

def Regions_polyRegions():
    r"""Regions_polyRegions() -> Seiscomp::Geo::PolyRegions &"""
    return _seismology.Regions_polyRegions()

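# A minimal usage sketch for the region lookups above (illustration only;
# the coordinates are arbitrary examples):
#
#     Regions.load()
#     print(Regions.getRegionName(52.5, 13.4))   # Flinn-Engdahl region name
#     print(Regions.getFlinnEngdahlRegionsCount())
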
SC3_LOCATOR_INTERFACE_VERSION = _seismology.SC3_LOCATOR_INTERFACE_VERSION

class SensorLocationDelegate(seiscomp.core.BaseObject):
    r"""Proxy of C++ Seiscomp::Seismology::SensorLocationDelegate class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr

    def getSensorLocation(self, pick):
        r"""getSensorLocation(SensorLocationDelegate self, Pick pick) -> SensorLocation"""
        return _seismology.SensorLocationDelegate_getSensorLocation(self, pick)
    __swig_destroy__ = _seismology.delete_SensorLocationDelegate

# Register SensorLocationDelegate in _seismology:
_seismology.SensorLocationDelegate_swigregister(SensorLocationDelegate)

class LocatorInterface(seiscomp.core.BaseObject):
    r"""Proxy of C++ Seiscomp::Seismology::LocatorInterface class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    F_NONE = _seismology.LocatorInterface_F_NONE

    F_BACKAZIMUTH = _seismology.LocatorInterface_F_BACKAZIMUTH

    F_SLOWNESS = _seismology.LocatorInterface_F_SLOWNESS

    F_TIME = _seismology.LocatorInterface_F_TIME

    F_ALL = _seismology.LocatorInterface_F_ALL

    EFlagsQuantity = _seismology.LocatorInterface_EFlagsQuantity

    NoCapability = _seismology.LocatorInterface_NoCapability

    InitialLocation = _seismology.LocatorInterface_InitialLocation

    FixedDepth = _seismology.LocatorInterface_FixedDepth

    DistanceCutOff = _seismology.LocatorInterface_DistanceCutOff

    IgnoreInitialLocation = _seismology.LocatorInterface_IgnoreInitialLocation

    CapQuantity = _seismology.LocatorInterface_CapQuantity

    Log = _seismology.LocatorInterface_Log

    Warning = _seismology.LocatorInterface_Warning

    __swig_destroy__ = _seismology.delete_LocatorInterface

    @staticmethod
    def Create(algo):
        r"""Create(char const * algo) -> LocatorInterface"""
        return _seismology.LocatorInterface_Create(algo)

    def name(self):
        r"""name(LocatorInterface self) -> std::string const &"""
        return _seismology.LocatorInterface_name(self)

    def setSensorLocationDelegate(self, delegate):
        r"""setSensorLocationDelegate(LocatorInterface self, SensorLocationDelegate delegate)"""
        return _seismology.LocatorInterface_setSensorLocationDelegate(self, delegate)

    def init(self, config):
        r"""init(LocatorInterface self, Config config) -> bool"""
        return _seismology.LocatorInterface_init(self, config)

    def parameters(self):
        r"""parameters(LocatorInterface self) -> VectorStr"""
        return _seismology.LocatorInterface_parameters(self)

    def parameter(self, name):
        r"""parameter(LocatorInterface self, std::string const & name) -> std::string"""
        return _seismology.LocatorInterface_parameter(self, name)

    def setParameter(self, name, value):
        r"""setParameter(LocatorInterface self, std::string const & name, std::string const & value) -> bool"""
        return _seismology.LocatorInterface_setParameter(self, name, value)

    def profiles(self):
        r"""profiles(LocatorInterface self) -> VectorStr"""
        return _seismology.LocatorInterface_profiles(self)

    def setProfile(self, name):
        r"""setProfile(LocatorInterface self, std::string const & name)"""
        return _seismology.LocatorInterface_setProfile(self, name)

    def capabilities(self):
        r"""capabilities(LocatorInterface self) -> int"""
        return _seismology.LocatorInterface_capabilities(self)

    def locate(self, *args):
        r"""
        locate(LocatorInterface self, Seiscomp::Seismology::LocatorInterface::PickList & pickList) -> Origin
        locate(LocatorInterface self, Seiscomp::Seismology::LocatorInterface::PickList & pickList, double initLat, double initLon, double initDepth, Time initTime) -> Origin
        """
        return _seismology.LocatorInterface_locate(self, *args)

    def relocate(self, origin):
        r"""relocate(LocatorInterface self, Origin origin) -> Origin"""
        return _seismology.LocatorInterface_relocate(self, origin)

    def lastMessage(self, arg2):
        r"""lastMessage(LocatorInterface self, Seiscomp::Seismology::LocatorInterface::MessageType arg2) -> std::string"""
        return _seismology.LocatorInterface_lastMessage(self, arg2)

    def supports(self, arg2):
        r"""supports(LocatorInterface self, Seiscomp::Seismology::LocatorInterface::Capability arg2) -> bool"""
        return _seismology.LocatorInterface_supports(self, arg2)

    def setFixedDepth(self, depth, use=True):
        r"""setFixedDepth(LocatorInterface self, double depth, bool use=True)"""
        return _seismology.LocatorInterface_setFixedDepth(self, depth, use)

    def useFixedDepth(self, use=True):
        r"""useFixedDepth(LocatorInterface self, bool use=True)"""
        return _seismology.LocatorInterface_useFixedDepth(self, use)

    def fixedDepth(self):
        r"""fixedDepth(LocatorInterface self) -> double"""
        return _seismology.LocatorInterface_fixedDepth(self)

    def usingFixedDepth(self):
        r"""usingFixedDepth(LocatorInterface self) -> bool"""
        return _seismology.LocatorInterface_usingFixedDepth(self)

    def releaseDepth(self):
        r"""releaseDepth(LocatorInterface self)"""
        return _seismology.LocatorInterface_releaseDepth(self)

    def setDistanceCutOff(self, distance):
        r"""setDistanceCutOff(LocatorInterface self, double distance)"""
        return _seismology.LocatorInterface_setDistanceCutOff(self, distance)

    def releaseDistanceCutOff(self):
        r"""releaseDistanceCutOff(LocatorInterface self)"""
        return _seismology.LocatorInterface_releaseDistanceCutOff(self)

    def isInitialLocationIgnored(self):
        r"""isInitialLocationIgnored(LocatorInterface self) -> bool"""
        return _seismology.LocatorInterface_isInitialLocationIgnored(self)

    def setIgnoreInitialLocation(self, f):
        r"""setIgnoreInitialLocation(LocatorInterface self, bool f)"""
        return _seismology.LocatorInterface_setIgnoreInitialLocation(self, f)

    def getPick(self, arrival):
        r"""getPick(LocatorInterface self, Arrival arrival) -> Pick"""
        return _seismology.LocatorInterface_getPick(self, arrival)

    def getSensorLocation(self, pick):
        r"""getSensorLocation(LocatorInterface self, Pick pick) -> SensorLocation"""
        return _seismology.LocatorInterface_getSensorLocation(self, pick)

# Register LocatorInterface in _seismology:
_seismology.LocatorInterface_swigregister(LocatorInterface)

def LocatorInterface_Create(algo):
    r"""LocatorInterface_Create(char const * algo) -> LocatorInterface"""
    return _seismology.LocatorInterface_Create(algo)

class PickNotFoundException(seiscomp.core.GeneralException):
    r"""Proxy of C++ Seiscomp::Seismology::PickNotFoundException class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(PickNotFoundException self) -> PickNotFoundException
        __init__(PickNotFoundException self, std::string const & str) -> PickNotFoundException
        """
        _seismology.PickNotFoundException_swiginit(self, _seismology.new_PickNotFoundException(*args))
    __swig_destroy__ = _seismology.delete_PickNotFoundException

# Register PickNotFoundException in _seismology:
_seismology.PickNotFoundException_swigregister(PickNotFoundException)

class LocatorException(seiscomp.core.GeneralException):
    r"""Proxy of C++ Seiscomp::Seismology::LocatorException class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(LocatorException self) -> LocatorException
        __init__(LocatorException self, std::string const & str) -> LocatorException
        """
        _seismology.LocatorException_swiginit(self, _seismology.new_LocatorException(*args))
    __swig_destroy__ = _seismology.delete_LocatorException

# Register LocatorException in _seismology:
_seismology.LocatorException_swigregister(LocatorException)

class StationNotFoundException(seiscomp.core.GeneralException):
    r"""Proxy of C++ Seiscomp::Seismology::StationNotFoundException class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(StationNotFoundException self) -> StationNotFoundException
        __init__(StationNotFoundException self, std::string const & str) -> StationNotFoundException
        """
        _seismology.StationNotFoundException_swiginit(self, _seismology.new_StationNotFoundException(*args))
    __swig_destroy__ = _seismology.delete_StationNotFoundException

# Register StationNotFoundException in _seismology:
_seismology.StationNotFoundException_swigregister(StationNotFoundException)


def arrivalToFlags(arrival):
    r"""arrivalToFlags(Arrival arrival) -> int"""
    return _seismology.arrivalToFlags(arrival)

def flagsToArrival(arrival, flags):
    r"""flagsToArrival(Arrival arrival, int flags)"""
    return _seismology.flagsToArrival(arrival, flags)
class FileNotFoundError(seiscomp.core.GeneralException):
    r"""Proxy of C++ Seiscomp::FileNotFoundError class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, filename):
        r"""__init__(FileNotFoundError self, std::string const & filename) -> FileNotFoundError"""
        _seismology.FileNotFoundError_swiginit(self, _seismology.new_FileNotFoundError(filename))
    __swig_destroy__ = _seismology.delete_FileNotFoundError

# Register FileNotFoundError in _seismology:
_seismology.FileNotFoundError_swigregister(FileNotFoundError)

class MultipleModelsError(seiscomp.core.GeneralException):
    r"""Proxy of C++ Seiscomp::MultipleModelsError class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, model):
        r"""__init__(MultipleModelsError self, std::string const & model) -> MultipleModelsError"""
        _seismology.MultipleModelsError_swiginit(self, _seismology.new_MultipleModelsError(model))
    __swig_destroy__ = _seismology.delete_MultipleModelsError

# Register MultipleModelsError in _seismology:
_seismology.MultipleModelsError_swigregister(MultipleModelsError)

class NoPhaseError(seiscomp.core.GeneralException):
    r"""Proxy of C++ Seiscomp::NoPhaseError class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self):
        r"""__init__(NoPhaseError self) -> NoPhaseError"""
        _seismology.NoPhaseError_swiginit(self, _seismology.new_NoPhaseError())
    __swig_destroy__ = _seismology.delete_NoPhaseError

# Register NoPhaseError in _seismology:
_seismology.NoPhaseError_swigregister(NoPhaseError)

class TravelTime(object):
    r"""Proxy of C++ Seiscomp::TravelTime class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(TravelTime self) -> TravelTime
        __init__(TravelTime self, std::string const & _phase, double _time, double _dtdd, double _dtdh, double _dddp, double _takeoff) -> TravelTime
        """
        _seismology.TravelTime_swiginit(self, _seismology.new_TravelTime(*args))

    def __eq__(self, other):
        r"""__eq__(TravelTime self, TravelTime other) -> bool"""
        return _seismology.TravelTime___eq__(self, other)

    def __lt__(self, other):
        r"""__lt__(TravelTime self, TravelTime other) -> bool"""
        return _seismology.TravelTime___lt__(self, other)
    phase = property(_seismology.TravelTime_phase_get, _seismology.TravelTime_phase_set, doc=r"""phase : std::string""")
    time = property(_seismology.TravelTime_time_get, _seismology.TravelTime_time_set, doc=r"""time : double""")
    dtdd = property(_seismology.TravelTime_dtdd_get, _seismology.TravelTime_dtdd_set, doc=r"""dtdd : double""")
    dtdh = property(_seismology.TravelTime_dtdh_get, _seismology.TravelTime_dtdh_set, doc=r"""dtdh : double""")
    dddp = property(_seismology.TravelTime_dddp_get, _seismology.TravelTime_dddp_set, doc=r"""dddp : double""")
    takeoff = property(_seismology.TravelTime_takeoff_get, _seismology.TravelTime_takeoff_set, doc=r"""takeoff : double""")
    azi = property(_seismology.TravelTime_azi_get, _seismology.TravelTime_azi_set, doc=r"""azi : Seiscomp::Core::Optional<(double)>::Impl""")
    __swig_destroy__ = _seismology.delete_TravelTime

# Register TravelTime in _seismology:
_seismology.TravelTime_swigregister(TravelTime)

class TravelTimeList(TravelTimeList_internal):
    r"""Proxy of C++ Seiscomp::TravelTimeList class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def isEmpty(self):
        r"""isEmpty(TravelTimeList self) -> bool"""
        return _seismology.TravelTimeList_isEmpty(self)

    def sortByTime(self):
        r"""sortByTime(TravelTimeList self)"""
        return _seismology.TravelTimeList_sortByTime(self)
    depth = property(_seismology.TravelTimeList_depth_get, _seismology.TravelTimeList_depth_set, doc=r"""depth : double""")
    delta = property(_seismology.TravelTimeList_delta_get, _seismology.TravelTimeList_delta_set, doc=r"""delta : double""")

    def __init__(self):
        r"""__init__(TravelTimeList self) -> TravelTimeList"""
        _seismology.TravelTimeList_swiginit(self, _seismology.new_TravelTimeList())
    __swig_destroy__ = _seismology.delete_TravelTimeList

# Register TravelTimeList in _seismology:
_seismology.TravelTimeList_swigregister(TravelTimeList)

class TravelTimeTableInterface(seiscomp.core.BaseObject):
    r"""Proxy of C++ Seiscomp::TravelTimeTableInterface class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _seismology.delete_TravelTimeTableInterface

    @staticmethod
    def Create(name):
        r"""Create(char const * name) -> TravelTimeTableInterface"""
        return _seismology.TravelTimeTableInterface_Create(name)

    def setModel(self, model):
        r"""setModel(TravelTimeTableInterface self, std::string const & model) -> bool"""
        return _seismology.TravelTimeTableInterface_setModel(self, model)

    def model(self):
        r"""model(TravelTimeTableInterface self) -> std::string const &"""
        return _seismology.TravelTimeTableInterface_model(self)

    def compute(self, *args):
        r"""
        compute(TravelTimeTableInterface self, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> TravelTimeList
        compute(TravelTimeTableInterface self, char const * phase, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> TravelTime
        """
        return _seismology.TravelTimeTableInterface_compute(self, *args)

    def computeFirst(self, lat1, lon1, dep1, lat2, lon2, elev2=0., ellc=1):
        r"""computeFirst(TravelTimeTableInterface self, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> TravelTime"""
        return _seismology.TravelTimeTableInterface_computeFirst(self, lat1, lon1, dep1, lat2, lon2, elev2, ellc)

    def computeTime(self, phase, lat1, lon1, dep1, lat2, lon2, elev2=0., ellc=1):
        r"""computeTime(TravelTimeTableInterface self, char const * phase, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> double"""
        return _seismology.TravelTimeTableInterface_computeTime(self, phase, lat1, lon1, dep1, lat2, lon2, elev2, ellc)

# Register TravelTimeTableInterface in _seismology:
_seismology.TravelTimeTableInterface_swigregister(TravelTimeTableInterface)

def TravelTimeTableInterface_Create(name):
    r"""TravelTimeTableInterface_Create(char const * name) -> TravelTimeTableInterface"""
    return _seismology.TravelTimeTableInterface_Create(name)

class TravelTimeTable(TravelTimeTableInterface):
    r"""Proxy of C++ Seiscomp::TravelTimeTable class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self):
        r"""__init__(TravelTimeTable self) -> TravelTimeTable"""
        _seismology.TravelTimeTable_swiginit(self, _seismology.new_TravelTimeTable())

    def setModel(self, model):
        r"""setModel(TravelTimeTable self, std::string const & model) -> bool"""
        return _seismology.TravelTimeTable_setModel(self, model)

    def model(self):
        r"""model(TravelTimeTable self) -> std::string const &"""
        return _seismology.TravelTimeTable_model(self)

    def compute(self, *args):
        r"""
        compute(TravelTimeTable self, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> TravelTimeList
        compute(TravelTimeTable self, char const * phase, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> TravelTime
        """
        return _seismology.TravelTimeTable_compute(self, *args)

    def computeFirst(self, lat1, lon1, dep1, lat2, lon2, elev2=0., ellc=1):
        r"""computeFirst(TravelTimeTable self, double lat1, double lon1, double dep1, double lat2, double lon2, double elev2=0., int ellc=1) -> TravelTime"""
        return _seismology.TravelTimeTable_computeFirst(self, lat1, lon1, dep1, lat2, lon2, elev2, ellc)
    __swig_destroy__ = _seismology.delete_TravelTimeTable

# Register TravelTimeTable in _seismology:
_seismology.TravelTimeTable_swigregister(TravelTimeTable)

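# A minimal usage sketch for the travel-time wrappers above (illustration
# only; the model name "iasp91" is an assumption, not taken from this file):
#
#     ttt = TravelTimeTable()
#     ttt.setModel("iasp91")
#     tt = ttt.computeFirst(0.0, 0.0, 10.0, 10.0, 10.0)  # src lat/lon/depth, sta lat/lon
#     print(tt.phase, tt.time)
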
def ellipcorr(phase, lat1, lon1, lat2, lon2, depth, corr):
    r"""ellipcorr(std::string const & phase, double lat1, double lon1, double lat2, double lon2, double depth, double & corr) -> bool"""
    return _seismology.ellipcorr(phase, lat1, lon1, lat2, lon2, depth, corr)

def getPhase(arg1, phaseCode):
    r"""getPhase(TravelTimeList arg1, std::string const & phaseCode) -> TravelTime"""
    return _seismology.getPhase(arg1, phaseCode)

def firstArrivalP(arg1):
    r"""firstArrivalP(TravelTimeList arg1) -> TravelTime"""
    return _seismology.firstArrivalP(arg1)
class LocSATErrorEllipsoid(object):
    r"""Proxy of C++ Seiscomp::LocSATErrorEllipsoid class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self):
        r"""__init__(LocSATErrorEllipsoid self) -> LocSATErrorEllipsoid"""
        _seismology.LocSATErrorEllipsoid_swiginit(self, _seismology.new_LocSATErrorEllipsoid())
    sxx = property(_seismology.LocSATErrorEllipsoid_sxx_get, _seismology.LocSATErrorEllipsoid_sxx_set, doc=r"""sxx : float""")
    syy = property(_seismology.LocSATErrorEllipsoid_syy_get, _seismology.LocSATErrorEllipsoid_syy_set, doc=r"""syy : float""")
    szz = property(_seismology.LocSATErrorEllipsoid_szz_get, _seismology.LocSATErrorEllipsoid_szz_set, doc=r"""szz : float""")
    stt = property(_seismology.LocSATErrorEllipsoid_stt_get, _seismology.LocSATErrorEllipsoid_stt_set, doc=r"""stt : float""")
    sxy = property(_seismology.LocSATErrorEllipsoid_sxy_get, _seismology.LocSATErrorEllipsoid_sxy_set, doc=r"""sxy : float""")
    sxz = property(_seismology.LocSATErrorEllipsoid_sxz_get, _seismology.LocSATErrorEllipsoid_sxz_set, doc=r"""sxz : float""")
    syz = property(_seismology.LocSATErrorEllipsoid_syz_get, _seismology.LocSATErrorEllipsoid_syz_set, doc=r"""syz : float""")
    stx = property(_seismology.LocSATErrorEllipsoid_stx_get, _seismology.LocSATErrorEllipsoid_stx_set, doc=r"""stx : float""")
    sty = property(_seismology.LocSATErrorEllipsoid_sty_get, _seismology.LocSATErrorEllipsoid_sty_set, doc=r"""sty : float""")
    stz = property(_seismology.LocSATErrorEllipsoid_stz_get, _seismology.LocSATErrorEllipsoid_stz_set, doc=r"""stz : float""")
    sdobs = property(_seismology.LocSATErrorEllipsoid_sdobs_get, _seismology.LocSATErrorEllipsoid_sdobs_set, doc=r"""sdobs : float""")
    smajax = property(_seismology.LocSATErrorEllipsoid_smajax_get, _seismology.LocSATErrorEllipsoid_smajax_set, doc=r"""smajax : float""")
    sminax = property(_seismology.LocSATErrorEllipsoid_sminax_get, _seismology.LocSATErrorEllipsoid_sminax_set, doc=r"""sminax : float""")
    strike = property(_seismology.LocSATErrorEllipsoid_strike_get, _seismology.LocSATErrorEllipsoid_strike_set, doc=r"""strike : float""")
    sdepth = property(_seismology.LocSATErrorEllipsoid_sdepth_get, _seismology.LocSATErrorEllipsoid_sdepth_set, doc=r"""sdepth : float""")
    stime = property(_seismology.LocSATErrorEllipsoid_stime_get, _seismology.LocSATErrorEllipsoid_stime_set, doc=r"""stime : float""")
    conf = property(_seismology.LocSATErrorEllipsoid_conf_get, _seismology.LocSATErrorEllipsoid_conf_set, doc=r"""conf : float""")
    __swig_destroy__ = _seismology.delete_LocSATErrorEllipsoid

# Register LocSATErrorEllipsoid in _seismology:
_seismology.LocSATErrorEllipsoid_swigregister(LocSATErrorEllipsoid)

class LocSAT(LocatorInterface):
    r"""Proxy of C++ Seiscomp::LocSAT class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self):
        r"""__init__(LocSAT self) -> LocSAT"""
        _seismology.LocSAT_swiginit(self, _seismology.new_LocSAT())
    __swig_destroy__ = _seismology.delete_LocSAT

    def init(self, config):
        r"""init(LocSAT self, Config config) -> bool"""
        return _seismology.LocSAT_init(self, config)

    def parameters(self):
        r"""parameters(LocSAT self) -> VectorStr"""
        return _seismology.LocSAT_parameters(self)

    def parameter(self, name):
        r"""parameter(LocSAT self, std::string const & name) -> std::string"""
        return _seismology.LocSAT_parameter(self, name)

    def setParameter(self, name, value):
        r"""setParameter(LocSAT self, std::string const & name, std::string const & value) -> bool"""
        return _seismology.LocSAT_setParameter(self, name, value)

    def profiles(self):
        r"""profiles(LocSAT self) -> VectorStr"""
        return _seismology.LocSAT_profiles(self)

    def setProfile(self, name):
        r"""setProfile(LocSAT self, std::string const & name)"""
        return _seismology.LocSAT_setProfile(self, name)

    @staticmethod
    def setDefaultProfile(name):
        r"""setDefaultProfile(std::string const & name)"""
        return _seismology.LocSAT_setDefaultProfile(name)

    @staticmethod
    def currentDefaultProfile():
        r"""currentDefaultProfile() -> std::string"""
        return _seismology.LocSAT_currentDefaultProfile()

    def setNewOriginID(self, newOriginID):
        r"""setNewOriginID(LocSAT self, std::string const & newOriginID)"""
        return _seismology.LocSAT_setNewOriginID(self, newOriginID)

    def capabilities(self):
        r"""capabilities(LocSAT self) -> int"""
        return _seismology.LocSAT_capabilities(self)

    def locate(self, *args):
        r"""
        locate(LocSAT self, Seiscomp::Seismology::LocatorInterface::PickList & pickList) -> Origin
        locate(LocSAT self, Seiscomp::Seismology::LocatorInterface::PickList & pickList, double initLat, double initLon, double initDepth, Time initTime) -> Origin
        """
        return _seismology.LocSAT_locate(self, *args)

    def relocate(self, origin):
        r"""relocate(LocSAT self, Origin origin) -> Origin"""
        return _seismology.LocSAT_relocate(self, origin)

    def errorEllipsoid(self):
        r"""errorEllipsoid(LocSAT self) -> LocSATErrorEllipsoid"""
        return _seismology.LocSAT_errorEllipsoid(self)

# Register LocSAT in _seismology:
_seismology.LocSAT_swigregister(LocSAT)

def LocSAT_setDefaultProfile(name):
    r"""LocSAT_setDefaultProfile(std::string const & name)"""
    return _seismology.LocSAT_setDefaultProfile(name)

def LocSAT_currentDefaultProfile():
    r"""LocSAT_currentDefaultProfile() -> std::string"""
    return _seismology.LocSAT_currentDefaultProfile()

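# A minimal usage sketch for the LocSAT wrapper above (illustration only;
# the profile name "iasp91" and the origin object are assumptions, not taken
# from this file):
#
#     loc = LocSAT()
#     loc.setProfile("iasp91")
#     relocated = loc.relocate(origin)  # origin: a seiscomp.datamodel.Origin
#     print(loc.errorEllipsoid().smajax)
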
509
lib/python/seiscomp/setup.py
Normal file
509
lib/python/seiscomp/setup.py
Normal file
@ -0,0 +1,509 @@
############################################################################
# Copyright (C) by gempa GmbH, GFZ Potsdam                                 #
#                                                                          #
# You can redistribute and/or modify this program under the               #
# terms of the SeisComP Public License.                                    #
#                                                                          #
# This program is distributed in the hope that it will be useful,          #
# but WITHOUT ANY WARRANTY; without even the implied warranty of           #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the             #
# SeisComP Public License for more details.                                #
############################################################################

import os
import sys
import glob
import getpass

try:
    # Python 2.5
    from xml.etree import ElementTree
    from xml.parsers.expat import ExpatError as ParseError
except ImportError:
    from elementtree import ElementTree
    from xml.parsers.expat import ExpatError as ParseError

from seiscomp import config

# Python version dependent string conversion
if sys.version_info[0] < 3:
    py3input = raw_input  # pylint: disable=E0602
else:
    py3input = input


def tagname(element):
|
||||
names = element.tag.split("}")
|
||||
if len(names) == 0:
|
||||
return ""
|
||||
|
||||
return names.pop()
|
||||
|
||||
|
||||
def oneliner(txt):
|
||||
return txt.strip().replace("\n", "")
|
||||
|
||||
|
||||
def block(txt, width=80):
|
||||
lines = [l.strip() for l in txt.strip().replace("\r", "").split('\n')]
|
||||
line = "\n".join(lines)
|
||||
|
||||
current = 0
|
||||
lines = []
|
||||
|
||||
while current < len(line):
|
||||
end = line.find('\n', current)
|
||||
if (end == -1) or (end - current > width):
|
||||
if len(line) - current > width:
|
||||
end = line.rfind(' ', current, current+width)
|
||||
if end == -1:
|
||||
end = line.find(' ', current)
|
||||
if end == -1:
|
||||
end = len(line)
|
||||
else:
|
||||
end = len(line)
|
||||
|
||||
lines.append(line[current:end].strip())
|
||||
|
||||
current = end + 1
|
||||
|
||||
return lines
|
||||
|
||||
|
||||
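# Editor's note: block() is the word-wrap helper used below to render
# parameter descriptions. A quick, runnable illustration of its behaviour
# (the sample sentence is made up):
#
#     >>> block("The quick brown fox jumps over the lazy dog", width=16)
#     ['The quick brown', 'fox jumps over', 'the lazy dog']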
class SetupNode:
    def __init__(self, parent, inp, next=None):
        self.parent = parent
        self.next = next
        self.child = None
        self.activeChild = None

        self.modname = ""
        self.groupname = ""
        self.input = inp
        self.value = ""
        self.path = ""
        self.optionValue = None
        self.isOption = False


class Option:
    """
    Setup input option wrapper.
    """

    def __init__(self, value):
        self.value = value
        self.desc = None
        self.inputs = []


class Input:
    """
    Setup input wrapper.
    """

    def __init__(self, name, t, default_value=None):
        self.name = name
        self.type = t
        self.default_value = default_value
        self.text = None
        self.desc = None
        self.echo = None
        self.options = []


def dumpTree(cfg, node):
    if node.input:
        cfg.setString(node.modname + "." + node.path, node.value)

    if node.activeChild:
        if node.isOption:
            dumpTree(cfg, node.activeChild.child)
        else:
            dumpTree(cfg, node.activeChild)

    if node.next is not None:
        dumpTree(cfg, node.next)
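# Editor's note: a self-contained sketch of how dumpTree() flattens the
# setup tree into "<module>.<path>" keys. The recording class stands in for
# seiscomp.config.Config; the module and parameter names are made up.
#
#   class _RecordingCfg:
#       def __init__(self):
#           self.values = {}
#       def setString(self, key, value):
#           self.values[key] = value
#
#   root = SetupNode(None, None)                  # container node, no input
#   leaf = SetupNode(root, Input("organization", None))
#   leaf.modname = "seedlink"                     # hypothetical module
#   leaf.path = "organization"
#   leaf.value = "gempa"
#   root.child = root.activeChild = leaf
#
#   cfg = _RecordingCfg()
#   dumpTree(cfg, root)
#   assert cfg.values == {"seedlink.organization": "gempa"}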
class Simple:
    """
    Simple console setup handler that parses all description XML files
    and extracts the setup part. It asks for all available settings line
    by line and passes the resulting configuration back, which is then
    passed to all init modules that have a setup method.
    """

    def __init__(self, args=None):
        self.modules = args if args is not None else []
        self.setupTree = SetupNode(None, None)
        self.paths = []
        self.currentNode = None

    def run(self, env):
        desc_pattern = os.path.join(
            env.SEISCOMP_ROOT, "etc", "descriptions", "*.xml")
        xmls = glob.glob(desc_pattern)

        setup_groups = {}

        for f in xmls:
            try:
                tree = ElementTree.parse(f)
            except ParseError as err:
                sys.stderr.write("%s: parsing XML failed: %s\n" % (f, err))
                continue

            root = tree.getroot()
            if tagname(root) != "seiscomp":
                sys.stderr.write(
                    "%s: wrong root tag, expected 'seiscomp'\n" % f)
                continue

            # Read all modules
            mods = tree.findall("module")

            for mod in mods:
                modname = mod.get('name')
                if not modname:
                    sys.stderr.write("%s: skipping module without name\n" % f)
                    continue

                if modname in setup_groups:
                    raise Exception(
                        "%s: duplicate module name: %s" % (f, modname))

                if self.modules and modname not in self.modules:
                    continue

                setup = mod.find("setup")
                if setup is None:
                    continue

                groups = setup.findall("group")
                if len(groups) == 0:
                    continue

                setup_groups[modname] = groups

            # Read all plugins
            plugins = tree.findall("plugin")

            for plugin in plugins:
                try:
                    modname = plugin.find('extends').text.strip()
                except Exception:
                    raise Exception("%s: plugin does not define 'extends'" % f)

                if modname.find('\n') >= 0:
                    raise Exception("%s: wrong module name in plugin."
                                    "extends: no newlines allowed" % f)

                if not modname:
                    sys.stderr.write("%s: skipping module without name\n" % f)
                    continue

                setup = plugin.find("setup")
                if setup is None:
                    continue

                groups = setup.findall("group")
                if len(groups) == 0:
                    continue

                if modname in setup_groups:
                    setup_groups[modname] += groups
                else:
                    setup_groups[modname] = groups

        for name, groups in sorted(setup_groups.items()):
            self.addGroups(self.setupTree, name, groups)

        # Always descend to the first child (if available)
        self.setupTree.activeChild = self.setupTree.child
        self.currentNode = self.setupTree.activeChild

        sys.stdout.write('''
====================================================================
SeisComP setup
====================================================================

This initializes the configuration of your installation.
If you have already made adjustments to the configuration files,
be warned that this setup will overwrite existing parameters
with default values. This is not a configurator for all
options of your setup but helps to set up initial standard values.

--------------------------------------------------------------------
Hint: Entered values starting with a dot (.) are handled
as commands. Available commands are:

quit: Quit setup without modification to your configuration.
back: Go back to the previous parameter.
help: Show help about the current parameter (if available).

If you need to enter a value with a leading dot, escape it
with a backslash, e.g. "\\.value".
--------------------------------------------------------------------

''')

        try:
            self.fillTree()
        except StopIteration:
            raise Exception("aborted by user")

        cfg = config.Config()
        dumpTree(cfg, self.setupTree)

        return cfg
    def addGroups(self, node, modname, groups):
        for g in groups:
            self.addInputs(None, node, modname, g.get(
                'name'), g, g.get('name', "") + ".")

    def addInputs(self, obj, parent, modname, group, xml, prefix):
        childs = parent.child
        if childs is not None:
            while childs.next is not None:
                childs = childs.next

        inputs = xml.findall("input")
        for inp in inputs:
            name = inp.get('name')
            if not name:
                raise Exception("%s: no name defined" % prefix)

            input_ = Input(name, inp.get('type'), inp.get('default'))
            try:
                input_.text = oneliner(inp.find('text').text)
            except Exception:
                input_.text = input_.name

            try:
                input_.desc = block(inp.find('description').text)
            except Exception:
                pass

            input_.echo = inp.get('echo')

            if obj:
                obj.inputs.append(input_)

            opts = inp.findall("option")

            node = SetupNode(parent, input_)
            node.path = prefix + input_.name
            node.value = input_.default_value
            node.modname = modname
            node.groupname = group
            node.isOption = len(opts) > 0

            if childs is None:
                childs = node
                parent.child = childs
            else:
                childs.next = node
                childs = childs.next

            options = node.child

            for opt in opts:
                value = opt.get('value')
                if not value:
                    raise Exception("%s: option without value" % prefix)

                optionNode = SetupNode(node, input_)
                optionNode.path = node.path + "." + value
                optionNode.modname = modname
                optionNode.groupname = group
                optionNode.isOption = False
                optionNode.optionValue = value

                option = Option(value)
                try:
                    option.desc = block(opt.find('description').text, 74)
                except Exception:
                    pass
                input_.options.append(option)

                if options is None:
                    options = optionNode
                    node.child = options
                else:
                    options.next = optionNode
                    options = options.next

                self.addInputs(option, optionNode, modname,
                               group, opt, node.path + ".")
    def fillTree(self):
        while True:
            if not self.currentNode:
                sys.stdout.write("\nFinished setup\n--------------\n\n")
                sys.stdout.write("P) Proceed to apply configuration\n")
                sys.stdout.write("D) Dump entered parameters\n")
                sys.stdout.write("B) Back to last parameter\n")
                sys.stdout.write("Q) Quit without changes\n")

                value = py3input('Command? [P]: ').upper()
                if value == "Q":
                    raise StopIteration()
                if value == "D":
                    sys.stdout.write("\n----\n")
                    cfg = config.Config()
                    dumpTree(cfg, self.setupTree)
                    cfg.writeConfig("-")
                    sys.stdout.write("----\n\n")
                    continue
                if value == "P" or not value:
                    sys.stdout.write("\nRunning setup\n-------------\n\n")
                    return
                if value == "B":
                    self.prevStep()
                    continue

                sys.stdout.write("\nEnter either P, D, B or Q\n")
                continue

            if not self.currentNode.input:
                self.nextStep()
                continue

            default_value = self.valueToString(self.currentNode)

            isChoice = False
            isPassword = False
            if self.currentNode.input.echo == "password":
                isPassword = True

            node_text = default_value
            prompt = self.currentNode.input.text

            if isPassword:
                node_text = '*' * len(node_text)
                prompt += " (input not echoed)"

            if (not self.currentNode.input.type or
                    self.currentNode.input.type != "boolean") and \
               len(self.currentNode.input.options) > 0:
                idx = 0
                def_idx = 0
                for opt in self.currentNode.input.options:
                    sys.stdout.write("%2d) %s\n" % (idx, opt.value))
                    if opt.desc:
                        for l in opt.desc:
                            sys.stdout.write("    %s\n" % l)
                    if default_value == opt.value:
                        def_idx = idx
                    idx += 1
                isChoice = True
                prompt += " [%d]: " % def_idx
            else:
                prompt += " [%s]: " % node_text

            if self.currentNode.input.echo == "password":
                value = getpass.getpass(prompt)
            else:
                value = py3input(prompt)

            if not value:
                value = default_value
            elif value == ".help":
                if self.currentNode.input.desc:
                    sys.stdout.write("\n%s\n\n" %
                                     "\n".join(self.currentNode.input.desc))
                else:
                    sys.stdout.write("\nSorry, no help available.\n\n")
                continue
            elif value == ".back":
                self.prevStep()
                continue
            elif value == ".quit":
                raise StopIteration()
            elif value.startswith("."):
                sys.stdout.write("Unknown command. Values starting with '.' are handled as commands such as\n"
                                 "'.help', '.quit' or '.back'. To use a leading dot in a value, escape it with '\\',\n"
                                 "e.g. '\\.color'\n")
                continue
            else:
                # Replace leading \. with .
                if value.startswith('\\.'):
                    value = value[1:]

            if isChoice:
                try:
                    idx = int(value)
                except ValueError:
                    idx = -1
                if idx < 0 or idx >= len(self.currentNode.input.options):
                    sys.stdout.write("\nEnter a number between 0 and %d\n\n" % (
                        len(self.currentNode.input.options)-1))
                    continue
                value = self.currentNode.input.options[idx].value

            if self.currentNode.input.type and self.currentNode.input.type == "boolean":
                if value not in ["yes", "no"]:
                    sys.stdout.write("Please enter 'yes' or 'no'\n")
                    continue

                if value == "yes":
                    value = "true"
                else:
                    value = "false"

            self.currentNode.value = value
            self.nextStep()

    @staticmethod
    def valueToString(node):
        if not node.input.type:
            if node.value is None:
                return ""
            return node.value

        if node.input.type == "boolean":
            if node.value == "true":
                return "yes"
            if node.value == "false":
                return "no"
            return "yes"

        if node.value is None:
            return ""
        return node.value

    def prevStep(self):
        if len(self.paths) == 0:
            sys.stdout.write("No previous step available\n")
            return

        self.currentNode = self.paths.pop()

    def nextStep(self):
        self.currentNode.activeChild = None
        self.paths.append(self.currentNode)

        # Choice?
        if self.currentNode.isOption:
            child = self.currentNode.child
            while child is not None:
                if child.optionValue == self.currentNode.value:
                    if child.child is not None:
                        self.currentNode.activeChild = child
                        self.currentNode = child.child
                        return

                    break
                child = child.next

        next = self.currentNode.next
        while next is None and self.currentNode.parent is not None:
            self.currentNode = self.currentNode.parent
            if self.currentNode.optionValue is not None:
                continue
            next = self.currentNode.next

        self.currentNode = next
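# Editor's note: valueToString() maps internally stored values back to what
# the user typed; booleans are kept as "true"/"false" in the tree but shown
# as "yes"/"no" at the prompt. A runnable sketch with made-up names:
#
#   inp = Input("enable", "boolean", "true")
#   node = SetupNode(None, inp)
#   node.value = "true"
#   assert Simple.valueToString(node) == "yes"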
645
lib/python/seiscomp/shell.py
Normal file
@@ -0,0 +1,645 @@
############################################################################
# Copyright (C) by gempa GmbH, GFZ Potsdam                                 #
#                                                                          #
# You can redistribute and/or modify this program under the               #
# terms of the SeisComP Public License.                                   #
#                                                                          #
# This program is distributed in the hope that it will be useful,         #
# but WITHOUT ANY WARRANTY; without even the implied warranty of          #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the            #
# SeisComP Public License for more details.                               #
############################################################################

from __future__ import print_function

import os
import sys
import glob

# Python version dependent string conversion
if sys.version_info[0] < 3:
    py3input = raw_input  #pylint: disable=E0602
else:
    py3input = input


def split_tokens(line):
    return line.split()


def convert_wildcard(s):
    wild = s.split(".")
    if len(wild) > 2:
        raise Exception("station selector: only one dot allowed")

    # Add station wildcard if only network is given
    if len(wild) == 1:
        wild.append('*')

    return '_'.join([x if x else '*' for x in wild])


def convert_stations(s):
    toks = s.split(".")
    if len(toks) != 2:
        raise Exception("station: expected format: NET.STA")
    return '_'.join(toks)
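# Editor's note: station selectors use NET.STA with '*' wildcards, while key
# files on disk are named with underscores. Verified examples of the two
# converters above (station names are made up):
#
#   convert_wildcard("GE")       # -> "GE_*"   (station part implied)
#   convert_wildcard(".APE")     # -> "*_APE"  (empty field becomes '*')
#   convert_stations("GE.APE")   # -> "GE_APE" (strict NET.STA only)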
class CLI:
    """
    Simple console shell.
    """

    def __init__(self, env=None):
        self.env = env

    def run(self, env):
        self.env = env

        sys.stdout.write('''\
%s
SeisComP shell
%s

Welcome to the SeisComP interactive shell. You can get help about
available commands with 'help'. 'exit' leaves the shell.

''' % (("="*80), ("="*80)))

        prompt = "$ "
        while True:
            line = py3input(prompt).strip()
            toks = split_tokens(line)
            if len(toks) == 0:
                continue

            if line in ("exit", "quit"):
                break

            self.handleCommand(toks[0], toks[1:])

    def handleCommand(self, cmd, args):
        try:
            if cmd == "help":
                return self.commandHelp(args)
            if cmd == "list":
                return self.commandList(args)
            if cmd == "delete":
                return self.commandDelete(args)
            if cmd == "print":
                return self.commandPrint(args)
            if cmd == "set":
                return self.commandSet(args)
            if cmd == "remove":
                return self.commandRemove(args)

            raise Exception("Unknown command: %s" % cmd)
        except Exception as e:
            sys.stdout.write("%s\n" % str(e))
            return False
    @staticmethod
    def commandHelp(args):
        if len(args) == 0:
            sys.stdout.write("""\
Commands:
  list stations
    Lists all available station keys.

  list profiles {mod}
    Lists all available profiles of a module.

  list modules {sta}
    Lists all bound modules of a station incl. profiles (if used).

  delete profile {mod} {profile}
    Deletes the given profile of the given module. If the profile does not
    exist, an error is raised.
    The module is removed from all stations that are using this profile.

  delete binding {mod} {sta}
    Deletes the binding for the given module and station. If the station is
    bound to module mod using a profile, the binding is kept, otherwise it is
    removed. An existing binding file (etc/key/[mod]/station_[sta]) is
    deleted in any case.

  print station {sta}
    Dumps all set binding parameters for the given station.

  set profile {mod} {profile} {sta-sel}
    Sets a binding profile of a module for all selected stations.
    The resulting station key file looks like this:
      ...
      mod:profile
      ...

    This command checks for the existence of the specified profile.

  set module {mod} {sta-sel}
    Binds all selected stations to the given module. No profiles are used,
    and if any of the stations is already using a profile, it is removed.
    The resulting station key file looks like this:
      ...
      mod
      ...

  remove profile {mod} {profile} {sta-sel}
    Removes the binding profile of the given module for all selected
    stations to which the module is already bound.
    As a result, all selected stations that are bound to the given module
    will use a station key file afterwards:

      mod:profile -> mod

  remove module {mod} {sta-sel}
    Unbinds the given module from the selected stations. The line that
    refers to the given module is completely removed from the station key
    files.

  exit
    Exit the shell.

  quit
    Alias for exit.
""")
    def commandList(self, args):
        if len(args) == 0:
            raise Exception("Missing operand")

        if args[0] == "stations":
            if len(args) > 2:
                raise Exception("Too many arguments")

            if len(args) > 1:
                wild = convert_wildcard(args[1])
            else:
                wild = "*"

            stas = []
            for f in sorted(glob.glob(os.path.join(self.env.key_dir, "station_" + wild))):
                stas.append(os.path.basename(f)[8:].replace("_", "."))

            for s in stas:
                print(s)

            return True

        if args[0] == "profiles":
            if len(args) > 2:
                raise Exception("Too many arguments")
            if len(args) < 2:
                raise Exception("Expected: mod")

            module = args[1]

            for f in sorted(glob.glob(os.path.join(self.env.key_dir, module, "profile_*"))):
                print(os.path.basename(f)[8:])

            return True

        if args[0] == "modules":
            if len(args) > 2:
                raise Exception("Too many arguments")
            if len(args) < 2:
                raise Exception("Expected: sta")

            sta = convert_stations(args[1])

            f = os.path.join(self.env.key_dir, "station_" + sta)
            if not os.path.exists(f):
                raise Exception("%s: station key does not exist" % args[1])

            for l in [line.strip() for line in open(f, "r").readlines()]:
                if l.startswith("#"):
                    continue
                if len(l) == 0:
                    continue
                print(l)

            return True

        raise Exception("Invalid argument: %s" % args[0])
    def commandDelete(self, args):
        if len(args) == 0:
            raise Exception("Missing operand")

        if args[0] == "profile":
            if len(args) > 3:
                raise Exception("Too many arguments")
            if len(args) < 3:
                raise Exception("Expected: mod profile")

            module = args[1]
            profile = args[2]

            if not os.path.exists(os.path.join(self.env.key_dir, module, "profile_" + profile)):
                raise Exception("%s/%s: profile not found" % (module, profile))

            os.remove(os.path.join(self.env.key_dir,
                                   module, "profile_" + profile))

            modified = 0
            for f in glob.glob(os.path.join(self.env.key_dir, "station_*")):
                lines = [line.strip() for line in open(f, "r").readlines()]

                new_lines = []
                is_modified = False

                for line in lines:
                    # Comment line
                    if line.startswith("#"):
                        new_lines.append(line)
                        continue

                    # Empty line
                    if not line:
                        new_lines.append(line)
                        continue

                    toks = line.split(':')

                    # Wrong module name
                    if toks[0] != module:
                        new_lines.append(line)
                        continue

                    # Profile found
                    if len(toks) > 1 and toks[1] == profile:
                        # Filter line
                        is_modified = True
                        continue

                    new_lines.append(line)

                if is_modified:
                    modified += 1
                    try:
                        open(f, "w").write('\n'.join(new_lines))
                    except Exception as e:
                        sys.stdout.write("%s: %s\n" % (f, str(e)))

            sys.stdout.write("OK, %d files modified\n" % modified)

            return True

        if args[0] == "binding":
            if len(args) > 3:
                raise Exception("Too many arguments")
            if len(args) < 3:
                raise Exception("Expected: mod sta")

            module = args[1]
            sta = convert_stations(args[2])

            if not os.path.exists(os.path.join(self.env.key_dir, module, "station_" + sta)):
                raise Exception("%s/%s: binding not found" % (module, args[2]))

            os.remove(os.path.join(self.env.key_dir, module, "station_" + sta))

            f = os.path.join(self.env.key_dir, "station_" + sta)
            lines = []
            try:
                lines = [line.strip() for line in open(f, "r").readlines()]
            except OSError:
                pass

            new_lines = []
            is_modified = False

            for line in lines:
                # Comment line
                if line.startswith("#"):
                    new_lines.append(line)
                    continue

                # Empty line
                if len(line) == 0:
                    new_lines.append(line)
                    continue

                toks = line.split(':')

                # Wrong module name
                if toks[0] != module:
                    new_lines.append(line)
                    continue

                # Binding without profile found
                if len(toks) == 1:
                    # Filter line
                    is_modified = True
                    continue

                new_lines.append(line)

            if is_modified:
                try:
                    open(f, "w").write('\n'.join(new_lines))
                except Exception as e:
                    sys.stdout.write("%s: %s\n" % (f, str(e)))

            return True

        raise Exception("Invalid argument: %s" % args[0])
    def commandPrint(self, args):
        if len(args) == 0:
            raise Exception("Missing operand")

        if args[0] == "station":
            if len(args) != 2:
                raise Exception("missing argument, expected: sta")

            sta = convert_stations(args[1])
            key = os.path.join(self.env.key_dir, "station_" + sta)
            try:
                lines = [line.strip() for line in open(key, "r").readlines()]
            except IOError:
                raise Exception("%s: station not configured" % sta)
            except Exception as e:
                raise Exception("%s: unexpected error: %s" % (sta, str(e)))

            first = True

            for line in lines:
                # Comment line
                if line.startswith("#"):
                    continue
                # Empty line
                if len(line) == 0:
                    continue

                toks = line.split(':')

                if len(toks) == 1:
                    binding = os.path.join(
                        self.env.key_dir, toks[0], "station_" + sta)
                else:
                    binding = os.path.join(
                        self.env.key_dir, toks[0], "profile_" + toks[1])

                if not first:
                    sys.stdout.write("\n")

                first = False
                sys.stdout.write("[%s]\n" % toks[0])
                sys.stdout.write("%s\n" % binding)
                try:
                    data = open(binding).read()
                    sys.stdout.write("-"*80 + "\n")
                    sys.stdout.write(data)
                    sys.stdout.write("-"*80 + "\n")
                except IOError:
                    sys.stdout.write("!binding not found\n")
                except Exception as e:
                    sys.stdout.write("!unexpected error: %s\n" % str(e))

        else:
            raise Exception("Invalid argument: %s" % args[0])
    def commandSet(self, args):
        if len(args) == 0:
            raise Exception("Missing operand")

        if args[0] == "profile":
            if len(args) != 4:
                raise Exception(
                    "missing arguments, expected: module profile station-selector")

            module = args[1]
            profile = args[2]

            wild = convert_wildcard(args[3])

            if not os.path.exists(os.path.join(self.env.key_dir, module, "profile_" + profile)):
                raise Exception("%s/%s: profile not found" % (module, profile))

            modified = 0
            for f in glob.glob(os.path.join(self.env.key_dir, "station_" + wild)):
                lines = [line.strip() for line in open(f, "r").readlines()]

                module_found = False
                is_modified = False

                for i in range(len(lines)):  #pylint: disable=C0200
                    line = lines[i]

                    # Comment line
                    if line.startswith("#"):
                        continue
                    # Empty line
                    if len(line) == 0:
                        continue

                    toks = line.split(':')

                    # Wrong module name
                    if toks[0] != module:
                        continue

                    module_found = True

                    # No profile
                    if len(toks) == 1:
                        toks.append("")
                    # Profile already set
                    elif toks[1] == profile:
                        continue

                    toks[1] = profile
                    lines[i] = ':'.join(toks)

                    is_modified = True

                if not module_found:
                    lines.append("%s:%s\n" % (module, profile))
                    is_modified = True

                if is_modified:
                    modified += 1
                    try:
                        open(f, "w").write('\n'.join(lines))
                    except Exception as e:
                        sys.stdout.write("%s: %s\n" % (f, str(e)))

            sys.stdout.write("OK, %d files modified\n" % modified)

            return True

        if args[0] == "module":
            if len(args) != 3:
                raise Exception(
                    "missing arguments, expected: module station-selector")

            module = args[1]

            wild = convert_wildcard(args[2])

            modified = 0
            for f in glob.glob(os.path.join(self.env.key_dir, "station_" + wild)):
                lines = [line.strip() for line in open(f, "r").readlines()]

                module_found = False
                is_modified = False

                for i in range(len(lines)):  #pylint: disable=C0200
                    line = lines[i]

                    # Comment line
                    if line.startswith("#"):
                        continue
                    # Empty line
                    if len(line) == 0:
                        continue

                    toks = line.split(':')

                    # Wrong module name
                    if toks[0] != module:
                        continue

                    module_found = True

                    lines[i] = module

                    is_modified = True

                if not module_found:
                    lines.append("%s\n" % module)
                    is_modified = True

                if is_modified:
                    modified += 1
                    try:
                        open(f, "w").write('\n'.join(lines))
                    except Exception as e:
                        sys.stdout.write("%s: %s\n" % (f, str(e)))

            sys.stdout.write("OK, %d files modified\n" % modified)

            return True

        raise Exception("Invalid argument: %s" % args[0])
    def commandRemove(self, args):
        if len(args) == 0:
            raise Exception("Missing operand")

        if args[0] == "profile":
            if len(args) != 4:
                raise Exception(
                    "Missing arguments, expected: module profile station-selector")

            module = args[1]
            profile = args[2]

            wild = convert_wildcard(args[3])

            modified = 0
            for f in glob.glob(os.path.join(self.env.key_dir, "station_" + wild)):
                lines = [line.strip() for line in open(f, "r").readlines()]

                is_modified = False
                for i in range(len(lines)):  #pylint: disable=C0200
                    line = lines[i]

                    # Comment line
                    if line.startswith("#"):
                        continue

                    # Empty line
                    if len(line) == 0:
                        continue

                    toks = line.split(':')

                    # No profile
                    if len(toks) == 1:
                        continue

                    # Wrong module name
                    if toks[0] != module:
                        continue

                    # Wrong profile name
                    if toks[1] != profile:
                        continue

                    lines[i] = module
                    is_modified = True

                if is_modified:
                    modified += 1

                    if (len(lines) > 0) and (len(lines[-1]) > 0):
                        lines.append("")

                    try:
                        open(f, "w").write('\n'.join(lines))
                    except Exception as e:
                        sys.stdout.write("%s: %s\n" % (f, str(e)))

            sys.stdout.write("OK, %d files modified\n" % modified)

            return True

        if args[0] == "module":
            if len(args) != 3:
                raise Exception(
                    "Missing arguments, expected: module station-selector")

            module = args[1]

            wild = convert_wildcard(args[2])

            modified = 0
            for f in glob.glob(os.path.join(self.env.key_dir, "station_" + wild)):
                lines = [line.strip() for line in open(f, "r").readlines()]
                new_lines = []

                is_modified = False
                for line in lines:
                    # Comment line
                    if line.startswith("#"):
                        new_lines.append(line)
                        continue

                    # Empty line
                    if len(line) == 0:
                        new_lines.append(line)
                        continue

                    toks = line.split(':')

                    # Wrong module name
                    if toks[0] != module:
                        new_lines.append(line)
                        continue

                    # Filter line
                    is_modified = True

                if is_modified:
                    modified += 1
                    if (len(new_lines) > 0) and (len(new_lines[-1]) > 0):
                        new_lines.append("")

                    try:
                        open(f, "w").write('\n'.join(new_lines))
                    except Exception as e:
                        sys.stdout.write("%s: %s\n" % (f, str(e)))

                    try:
                        os.remove(os.path.join(self.env.key_dir,
                                               module, os.path.basename(f)))
                    except OSError:
                        pass

            sys.stdout.write("OK, %d files modified\n" % modified)

            return True

        raise Exception("Invalid argument: %s" % args[0])
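# Editor's note: a hypothetical interactive session with the CLI above
# (module, profile and station names are made up):
#
#   $ list stations
#   GE.APE
#   GE.KBS
#   $ set profile seedlink geofon GE.*
#   OK, 2 files modified
#   $ print station GE.APE
#   [seedlink]
#   .../etc/key/seedlink/profile_geofon
#   ...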
260
lib/python/seiscomp/slclient.py
Normal file
@@ -0,0 +1,260 @@
import os, sys, tempfile
import datetime, time, re
from seiscomp import mseedlite as mseed


def _timeparse(t, format):
    """Parse a time string that might contain fractions of a second.

    Fractional seconds are supported using a fragile, miserable hack.
    Given a time string like '02:03:04.234234' and a format string of
    '%H:%M:%S', time.strptime() will raise a ValueError with this
    message: 'unconverted data remains: .234234'. If %S is in the
    format string and the ValueError matches as above, a datetime
    object will be created from the part that matches and the
    microseconds in the time string.
    """
    try:
        return datetime.datetime(*time.strptime(t, format)[0:6]).time()
    except ValueError as msg:
        if "%S" in format:
            msg = str(msg)
            mat = re.match(r"unconverted data remains:"
                           r" \.([0-9]{1,6})$", msg)
            if mat is not None:
                # fractional seconds are present - this is the style
                # used by datetime's isoformat() method
                frac = "." + mat.group(1)
                t = t[:-len(frac)]
                t = datetime.datetime(*time.strptime(t, format)[0:6])
                microsecond = int(float(frac)*1e6)
                return t.replace(microsecond=microsecond)
            else:
                mat = re.match(r"unconverted data remains:"
                               r" \,([0-9]{3,3})$", msg)
                if mat is not None:
                    # fractional seconds are present - this is the style
                    # used by the logging module
                    frac = "." + mat.group(1)
                    t = t[:-len(frac)]
                    t = datetime.datetime(*time.strptime(t, format)[0:6])
                    microsecond = int(float(frac)*1e6)
                    return t.replace(microsecond=microsecond)

        raise


def timeparse(t):
    return _timeparse(t, "%Y/%m/%d %H:%M:%S")
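# Editor's note: a runnable example of the fractional-seconds workaround.
# Note the asymmetry in the original code: with fractions present a full
# datetime is returned, without them only a bare time object.
#
#   timeparse("2024/05/01 12:30:45.250000")
#   # -> datetime.datetime(2024, 5, 1, 12, 30, 45, 250000)
#   timeparse("2024/05/01 12:30:45")
#   # -> datetime.time(12, 30, 45)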
class Input(mseed.Input):

    def __init__(self, server, streams,
                 stime=None, etime=None, timeout=None, verbose=0):

        # XXX Add the possibility for supplying stime and etime as
        # individual times for each stream.

        """
        'streams' must be a list containing tuples of (net,sta,loc,cha)
        """

        import subprocess

        streams = ["%-3s %5s %s%3s.D" % s for s in streams]
        streams.sort()

        self.tmp = tempfile.NamedTemporaryFile(mode="w", prefix="slinktool.")
        self.tmp.write("\n".join(streams)+"\n")
        self.tmp.flush()
        if verbose:
            sys.stderr.write("\n".join(streams)+"\n")

        slinktool = os.getenv("SLINKTOOL")
        if not slinktool:
            slinktool = "slinktool"
        args = [slinktool, "-l", self.tmp.name, "-o", "-"]
        if stime:
            args.append("-tw")
            tw = "%d,%d,%d,%d,%d,%d:" % (stime.year, stime.month, stime.day,
                                         stime.hour, stime.minute, stime.second)
            if etime:
                tw += "%d,%d,%d,%d,%d,%d" % (etime.year, etime.month, etime.day,
                                             etime.hour, etime.minute, etime.second)
            args.append(tw)
        if verbose:
            args.append("-v")

        if timeout:
            try:
                assert int(timeout) > 0
            except Exception:
                raise TypeError("illegal timeout parameter")
            args += ["-nt", "%d" % int(timeout)]

        args.append(server)
        # start 'slinktool' as sub-process
        self.popen = subprocess.Popen(args, stdout=subprocess.PIPE, shell=False)
        infile = self.popen.stdout

        mseed.Input.__init__(self, infile)

    def __del__(self):
        """
        Shut down SeedLink connections and close input.
        """
        sys.stderr.write("shutting down slinktool\n")
        sys.stderr.flush()

        slinktool_pid = self.popen.pid
        # It would of course be much better to send SIGTERM,
        # but somehow slinktool often appears to ignore it.
        # XXX Need to figure out why, and perhaps fix it (not critical).
        self.popen.kill()
        self.popen.communicate()
        # mseed.Input.__del__(self)  # closes the input file
class Input2(mseed.Input):

    def __init__(self, server, streams, stime=None, etime=None, verbose=0):

        """
        XXX information not up to date!!! XXX

        'streams' must be a dict containing tuples of (stime, etime),
        with the key being the stream_id and stime and etime being
        the starting and end time of the time window, respectively.
        The times must be seis.Time objects. For instance

            stime = seis.Time(...)
            etime = seis.Time(...)
            streams["GE.KBS.00.BHZ.D"] = (stime, etime)

        It is more efficient to request the same time interval for
        all streams. Wildcards for the channels are allowed. If
        stime is None, only new data are retrieved as they come in.
        """

        # NOTE (editor): this legacy class still references the old 'seis'
        # module (seis.Time), which is not imported in this file.

        streams = ["%-3s %5s %s%3s.D" % tuple(s.split(".")[:4])
                   for s in streams]
        streams.sort()

        self.tmp = tempfile.NamedTemporaryFile(mode="w", prefix="slinktool.")
        self.tmp.write("\n".join(streams)+"\n")
        sys.stderr.write("\n".join(streams)+"\n")
        self.tmp.flush()

        cmd = "slinktool -l %s -o -" % self.tmp.name
        if stime:
            assert isinstance(stime, seis.Time)
            cmd += " -tw %d,%d,%d,%d,%d,%d:" % stime.asDate
            if etime:
                assert isinstance(etime, seis.Time)
                cmd += "%d,%d,%d,%d,%d,%d" % etime.asDate
        cmd = cmd + "%s '%s'" % (verbose*" -v", server)

        infile = os.popen(cmd)

        mseed.Input.__init__(self, infile)
def available(server="localhost:18000",
|
||||
time_window=None, stream_ids=None, verbose=0):
|
||||
|
||||
"""
|
||||
Connects to server and returns a dictionary of lists of available
|
||||
time windows as tuples of (start_time, end_time) for each available
|
||||
stream. The stream set can be limited by specifying a list of
|
||||
stream_ids in the format usual format, i.e. net.sta.loc.cha.type,
|
||||
e.g. "GE.KBS.00.BHZ.D".
|
||||
Note that often the returned lists contain only one time tuple,
|
||||
corresponding to one contiguous time window available.
|
||||
|
||||
NEW:
|
||||
The search for available data can be limited to a time window by
|
||||
specifying the "time_window" parameter, which must be a tuple
|
||||
containing the starting and end time as seis.Time objects.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
if time_window:
|
||||
stime, etime = time_window
|
||||
assert stime <= etime
|
||||
else:
|
||||
stime, etime = None, None
|
||||
|
||||
cmd = "slinktool -Q %s %s " % (verbose*"-v ", server)
|
||||
infile = os.popen(cmd)
|
||||
windows = {}
|
||||
|
||||
# parse the output of "slinktool -Q"
|
||||
# It is assumed that the lines consist of the fields
|
||||
# net,sta,[loc,], cha, type, date1, time1, "-", date2, time2
|
||||
# Since the location code (loc) may or may not be present, we
|
||||
# determine the position of the dash "-" to determine where the
|
||||
# other fields are.
|
||||
regex = re.compile("^[A-Z][A-Z]\ [A-Z].*[12][0-9]{3}(/[0-9]{2}){2}.*$")
|
||||
for line in infile:
|
||||
if regex.match(line): # line containing a time window, a bit crude
|
||||
|
||||
line = line.split()
|
||||
try:
|
||||
dash = line.index("-")
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
if dash==7: # location code is present
|
||||
loc = line[2]
|
||||
else: loc = ""
|
||||
|
||||
net, sta, cha, typ = line[0], line[1], line[dash-4], line[dash-3]
|
||||
|
||||
stream_id = "%s.%s.%s.%s.%s" % (net, sta, loc, cha, typ)
|
||||
|
||||
if stream_ids and stream_id not in stream_ids:
|
||||
continue
|
||||
|
||||
t1 = seis.Time("%s %s" % (line[dash-2], line[dash-1]))
|
||||
t2 = seis.Time("%s %s" % (line[dash+1], line[dash+2]))
|
||||
|
||||
if stime and t2<stime or etime and t1>etime:
|
||||
continue # non-overlapping time windows
|
||||
|
||||
if stime and t1<stime:
|
||||
t1 = stime
|
||||
if etime and t2>etime:
|
||||
t2 = etime
|
||||
|
||||
if not stream_id in windows:
|
||||
windows[stream_id] = []
|
||||
|
||||
windows[stream_id].append((t1,t2))
|
||||
|
||||
elif verbose:
|
||||
# probably some diagnostic output
|
||||
sys.stdout.write("%s\n" % line.strip())
|
||||
|
||||
return windows
|
||||
|
||||
|
||||
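# Editor's note: the dash-index trick above, demonstrated on one sample line
# of "slinktool -Q" output (the station values are made up; the field layout
# follows the comment in available()):
#
#   toks = "GE APE 00 BHZ D 2003/02/23 12:00:00 - 2004/12/13 12:00:00".split()
#   dash = toks.index("-")                     # 7 -> location code present
#   loc = toks[2] if dash == 7 else ""
#   net, sta, cha, typ = toks[0], toks[1], toks[dash-4], toks[dash-3]
#   # -> stream_id "GE.APE.00.BHZ.D",
#   #    window "2003/02/23 12:00:00" .. "2004/12/13 12:00:00"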
def server_version(host, port=18000):

    import socket

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.connect((host, port))
    except Exception:
        return None
    # The SeedLink handshake is bytes on the wire; decode for comparison.
    s.send(b"HELLO\n")
    data = s.recv(1024).decode("utf-8", "replace")
    s.close()
    if data[:8] != "SeedLink":
        return None

    return data[10:13]


def server_running(host, port=18000):

    if server_version(host, port):
        return True

    return False
2447
lib/python/seiscomp/system.py
Normal file
File diff suppressed because it is too large
245
lib/python/seiscomp/utils.py
Normal file
@@ -0,0 +1,245 @@
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 4.0.2
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.

from sys import version_info as _swig_python_version_info
if _swig_python_version_info < (2, 7, 0):
    raise RuntimeError("Python 2.7 or later required")

# Import the low-level C/C++ module
if __package__ or "." in __name__:
    from . import _utils
else:
    import _utils

try:
    import builtins as __builtin__
except ImportError:
    import __builtin__


def _swig_repr(self):
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)


def _swig_setattr_nondynamic_instance_variable(set):
    def set_instance_attr(self, name, value):
        if name == "thisown":
            self.this.own(value)
        elif name == "this":
            set(self, name, value)
        elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
            set(self, name, value)
        else:
            raise AttributeError("You cannot add instance attributes to %s" % self)
    return set_instance_attr


def _swig_setattr_nondynamic_class_variable(set):
    def set_class_attr(cls, name, value):
        if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
            set(cls, name, value)
        else:
            raise AttributeError("You cannot add class attributes to %s" % cls)
    return set_class_attr


def _swig_add_metaclass(metaclass):
    """Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
    def wrapper(cls):
        return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
    return wrapper


class _SwigNonDynamicMeta(type):
    """Meta class to enforce nondynamic attributes (no new attributes) for a class"""
    __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
class SwigPyIterator(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _utils.delete_SwigPyIterator

    def value(self):
        return _utils.SwigPyIterator_value(self)

    def incr(self, n=1):
        return _utils.SwigPyIterator_incr(self, n)

    def decr(self, n=1):
        return _utils.SwigPyIterator_decr(self, n)

    def distance(self, x):
        return _utils.SwigPyIterator_distance(self, x)

    def equal(self, x):
        return _utils.SwigPyIterator_equal(self, x)

    def copy(self):
        return _utils.SwigPyIterator_copy(self)

    def next(self):
        return _utils.SwigPyIterator_next(self)

    def __next__(self):
        return _utils.SwigPyIterator___next__(self)

    def previous(self):
        return _utils.SwigPyIterator_previous(self)

    def advance(self, n):
        return _utils.SwigPyIterator_advance(self, n)

    def __eq__(self, x):
        return _utils.SwigPyIterator___eq__(self, x)

    def __ne__(self, x):
        return _utils.SwigPyIterator___ne__(self, x)

    def __iadd__(self, n):
        return _utils.SwigPyIterator___iadd__(self, n)

    def __isub__(self, n):
        return _utils.SwigPyIterator___isub__(self, n)

    def __add__(self, n):
        return _utils.SwigPyIterator___add__(self, n)

    def __sub__(self, *args):
        return _utils.SwigPyIterator___sub__(self, *args)

    def __iter__(self):
        return self

# Register SwigPyIterator in _utils:
_utils.SwigPyIterator_swigregister(SwigPyIterator)
def basename(name):
    return _utils.basename(name)

def fileExists(file):
    return _utils.fileExists(file)

def pathExists(path):
    return _utils.pathExists(path)

def createPath(path):
    return _utils.createPath(path)

def removeExtension(name):
    return _utils.removeExtension(name)

def bytesToStreambuf(data, n):
    return _utils.bytesToStreambuf(data, n)

def stringToStreambuf(str):
    return _utils.stringToStreambuf(str)

def file2ostream(fn):
    return _utils.file2ostream(fn)

def file2istream(fn):
    return _utils.file2istream(fn)

class StopWatch(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _utils.StopWatch_swiginit(self, _utils.new_StopWatch(*args))

    def restart(self):
        return _utils.StopWatch_restart(self)

    def reset(self):
        return _utils.StopWatch_reset(self)

    def isActive(self):
        return _utils.StopWatch_isActive(self)

    def elapsed(self):
        return _utils.StopWatch_elapsed(self)
    __swig_destroy__ = _utils.delete_StopWatch

# Register StopWatch in _utils:
_utils.StopWatch_swigregister(StopWatch)
class Timer(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, timeoutseconds=0):
        _utils.Timer_swiginit(self, _utils.new_Timer(timeoutseconds))
    __swig_destroy__ = _utils.delete_Timer

    def setTimeout(self, seconds):
        return _utils.Timer_setTimeout(self, seconds)

    def setTimeout2(self, seconds, nanoseconds):
        return _utils.Timer_setTimeout2(self, seconds, nanoseconds)

    def setCallback(self, arg2):
        return _utils.Timer_setCallback(self, arg2)

    def setSingleShot(self, arg2):
        return _utils.Timer_setSingleShot(self, arg2)

    def start(self):
        return _utils.Timer_start(self)

    def stop(self):
        return _utils.Timer_stop(self)

    def disable(self):
        return _utils.Timer_disable(self)

    def isActive(self):
        return _utils.Timer_isActive(self)

# Register Timer in _utils:
_utils.Timer_swigregister(Timer)
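# Editor's note: a usage sketch for the two wrappers above, assuming the
# compiled _utils extension is importable as seiscomp.utils. The callback
# signature follows the underlying C++ API:
#
#   from seiscomp.utils import StopWatch, Timer
#
#   sw = StopWatch()
#   ...                         # do some work
#   print(sw.elapsed())         # elapsed time span
#
#   t = Timer(5)                # timeoutseconds=5
#   t.setCallback(callback)     # callback defined elsewhere
#   t.setSingleShot(True)
#   t.start()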
class UnitConversion(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _utils.UnitConversion_swiginit(self, _utils.new_UnitConversion(*args))
    fromUnit = property(_utils.UnitConversion_fromUnit_get, _utils.UnitConversion_fromUnit_set)
    toUnit = property(_utils.UnitConversion_toUnit_get, _utils.UnitConversion_toUnit_set)
    toQMLUnit = property(_utils.UnitConversion_toQMLUnit_get, _utils.UnitConversion_toQMLUnit_set)
    toSEEDUnit = property(_utils.UnitConversion_toSEEDUnit_get, _utils.UnitConversion_toSEEDUnit_set)
    scale = property(_utils.UnitConversion_scale_get, _utils.UnitConversion_scale_set)
    __swig_destroy__ = _utils.delete_UnitConversion

# Register UnitConversion in _utils:
_utils.UnitConversion_swigregister(UnitConversion)

class UnitConverter(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    @staticmethod
    def get(fromUnit):
        return _utils.UnitConverter_get(fromUnit)

    def __init__(self):
        _utils.UnitConverter_swiginit(self, _utils.new_UnitConverter())
    __swig_destroy__ = _utils.delete_UnitConverter

# Register UnitConverter in _utils:
_utils.UnitConverter_swigregister(UnitConverter)

def UnitConverter_get(fromUnit):
    return _utils.UnitConverter_get(fromUnit)
10
lib/python/seiscomp3/Client.py
Normal file
@@ -0,0 +1,10 @@
from seiscomp.client import *
from seiscomp.system import PluginRegistry

import seiscomp3.Logging
import seiscomp3.Config
import seiscomp3.Core
import seiscomp3.DataModel
import seiscomp3.IO
import seiscomp3.Math
import seiscomp3.Utils
1
lib/python/seiscomp3/Config.py
Normal file
@@ -0,0 +1 @@
from seiscomp.config import *

1
lib/python/seiscomp3/Core.py
Normal file
@@ -0,0 +1 @@
from seiscomp.core import *

6
lib/python/seiscomp3/DataModel.py
Normal file
@@ -0,0 +1,6 @@
from seiscomp.datamodel import *

import seiscomp3.IO
import seiscomp3.Math
import seiscomp3.Core

4
lib/python/seiscomp3/Geo.py
Normal file
@@ -0,0 +1,4 @@
from seiscomp.geo import *

import seiscomp3.Math
import seiscomp3.Core

4
lib/python/seiscomp3/IO.py
Normal file
@@ -0,0 +1,4 @@
from seiscomp.io import *

import seiscomp3.Math
import seiscomp3.Core

1
lib/python/seiscomp3/Kernel.py
Normal file
@@ -0,0 +1 @@
from seiscomp.kernel import *

1
lib/python/seiscomp3/Logging.py
Normal file
@@ -0,0 +1 @@
from seiscomp.logging import *

3
lib/python/seiscomp3/Math.py
Normal file
@@ -0,0 +1,3 @@
from seiscomp.math import *

import seiscomp3.Core

7
lib/python/seiscomp3/Seismology.py
Normal file
@@ -0,0 +1,7 @@
from seiscomp.seismology import *

import seiscomp3.IO
import seiscomp3.Math
import seiscomp3.Core
import seiscomp3.DataModel
import seiscomp3.Config

1
lib/python/seiscomp3/Setup.py
Normal file
@@ -0,0 +1 @@
from seiscomp.setup import *

1
lib/python/seiscomp3/Shell.py
Normal file
@@ -0,0 +1 @@
from seiscomp.shell import *

4
lib/python/seiscomp3/System.py
Normal file
@@ -0,0 +1,4 @@
from seiscomp.system import *

import seiscomp3.Core
import seiscomp3.Config

1
lib/python/seiscomp3/Utils.py
Normal file
@@ -0,0 +1 @@
from seiscomp.utils import *

19
lib/python/seiscomp3/__init__.py
Normal file
@@ -0,0 +1,19 @@
import os
import sys
import warnings

sys.setdlopenflags(os.RTLD_LAZY | os.RTLD_GLOBAL)

# Since Python 3.2 DeprecationWarnings are ignored by default. Since Python 3.7
# DeprecationWarnings are shown when triggered directly by code in __main__.
# We enable DeprecationWarnings again unless warning options have been
# specified on the command line, e.g. -Wignore.
if not sys.warnoptions:
    warnings.simplefilter("default", category=DeprecationWarning)

warnings.warn(
    "The SeisComP3 python API compatibility layer is deprecated and will be removed "
    "with SeisComP 7. Change your imports from 'seiscomp3' to 'seiscomp'.",
    DeprecationWarning,
    2
)
Block a user