[seiscomp, scanloc] Install, add .gitignore

This commit is contained in:
2025-10-09 15:07:02 +02:00
commit 20f5301bb1
2848 changed files with 1315858 additions and 0 deletions

View File

View File

@ -0,0 +1,366 @@
from __future__ import print_function
import seiscomp.datamodel, seiscomp.core, seiscomp.config
from .helpers import parsers
import datetime
import sys
class sc3(object):
    """Base mixin that maps nettab description objects onto SeisComP3
    (seiscomp.datamodel) inventory objects.

    Subclasses supply sc3Att() (attribute dictionary), sc3Resolv()
    (reference resolution) and optional child objects; this class
    creates, fills and caches the corresponding datamodel object.
    """

    def _fillSc3(self, obj, att):
        """Apply the attribute dict *att* to the datamodel object *obj*.

        'Comment' and 'Pid' entries become child Comment objects; every
        other key is converted by the validator registered for this
        object type and applied via the matching set<Key>() method.
        """
        commentNum = 0
        for (k, p) in att.items():
            try:
                if k == 'Comment':
                    # print('DEBUG: Adding comment', p)
                    if p.startswith('Grant'):
                        # 2020: These belong in DOI metadata, not here.
                        continue
                    c = seiscomp.datamodel.Comment()
                    c.setText(p)
                    c.setId(str(commentNum))
                    commentNum += 1
                    obj.add(c)
                    continue
                if k == 'Pid':
                    # print('DEBUG: Adding Pid as comment', p)
                    # Persistent identifiers are stored as a JSON-like
                    # comment with an FDSNXML identifier id.
                    c = seiscomp.datamodel.Comment()
                    (typ, val) = p.split(':', 1)
                    s = '{"type":"%s", "value":"%s"}' % (typ.upper(), val)
                    c.setText(s)
                    c.setId('FDSNXML:Identifier/' + str(commentNum))
                    commentNum += 1
                    obj.add(c)
                    continue
                w = 'set' + k
                p = self.sc3Valid['attributes'][k]['validator'](p)
                getattr(obj, w)(p)
            except Exception as e:
                # Report and continue: one bad attribute should not
                # abort filling the whole object.
                print("[Error] %s = %s (%s)" % (k, p, e),
                      file=sys.stderr)

    @staticmethod
    def getBool(val):
        """Convert "True"/"False" (or 1/0) into a Python bool."""
        if val == "True" or val == 1:
            return True
        elif val == "False" or val == 0:
            return False
        else:
            raise Exception("Invalid Boolean Value")

    @staticmethod
    def getString(data):
        """Return *data* stripped of surrounding whitespace."""
        return data.strip()

    @staticmethod
    def getRealArray(data):
        """Build a datamodel RealArray from an iterable of numbers."""
        RA = seiscomp.datamodel.RealArray()
        for r in map(float, data):
            RA.content().push_back(r)
        return RA

    @staticmethod
    def getComplexArray(data):
        """Build a datamodel ComplexArray from (real, imag) pairs."""
        CA = seiscomp.datamodel.ComplexArray()
        for (r, i) in data:
            CA.content().push_back(complex(float(r), float(i)))
        return CA

    @staticmethod
    def getDate(value):
        """Convert a datetime or a date string into seiscomp.core.Time.

        Any other value is returned unchanged (assumed to already be a
        Time object -- TODO confirm with callers).
        """
        if isinstance(value, datetime.datetime):
            return seiscomp.core.Time(*(value.timetuple()[:6]))
        elif isinstance(value, str):
            value = parsers.parseDate(value)
            return seiscomp.core.Time(*(value.timetuple()[:6]))
        return value

    @staticmethod
    def getBlob(value):
        """Wrap *value* in a datamodel Blob."""
        b = seiscomp.datamodel.Blob()
        b.setContent(value)
        return b

    @staticmethod
    def getStationGroupType(val):
        """Map "ARRAY"/"DEPLOYMENT" to the datamodel enumeration."""
        if val == "ARRAY":
            return seiscomp.datamodel.ARRAY
        elif val == "DEPLOYMENT":
            return seiscomp.datamodel.DEPLOYMENT
        else:
            raise Exception("Invalid station group type")

    @staticmethod
    def _findValidOnes(mode):
        """Return the creator + attribute-validator table for *mode*.

        Each entry maps a nettab attribute name to the validator used
        by _fillSc3 before calling the datamodel setter.  Returns None
        for an unknown mode.
        """
        valid = {
            'dataloggerCalibration': {
                'creator': seiscomp.datamodel.DataloggerCalibration,
                'attributes': {
                    'SerialNumber': { 'validator': sc3.getString },
                    'Channel': { 'validator': int },
                    'Start': { 'validator': sc3.getDate },
                    'End': { 'validator': sc3.getDate },
                    'Gain': { 'validator': float },
                    'GainFrequency': { 'validator': float },
                    'Remark': { 'validator': sc3.getBlob }
                }
            },
            'sensorCalibration': {
                'creator': seiscomp.datamodel.SensorCalibration,
                'attributes': {
                    'SerialNumber': { 'validator': sc3.getString },
                    'Channel': { 'validator': int },
                    'Start': { 'validator': sc3.getDate },
                    'End': { 'validator': sc3.getDate },
                    'Gain': { 'validator': float },
                    'GainFrequency': { 'validator': float },
                    'Remark': { 'validator': sc3.getBlob }
                }
            },
            'channel': {
                'creator': seiscomp.datamodel.Stream_Create,
                'attributes': {
                    'Code': { 'validator': sc3.getString },
                    'Start': { 'validator': sc3.getDate },
                    'End': { 'validator': sc3.getDate },
                    'Datalogger': { 'validator': sc3.getString },
                    'DataloggerSerialNumber': { 'validator': sc3.getString },
                    'DataloggerChannel': { 'validator': int },
                    'Sensor': { 'validator': sc3.getString },
                    'SensorSerialNumber': { 'validator': sc3.getString },
                    'SensorChannel': { 'validator': int },
                    'ClockSerialNumber': { 'validator': sc3.getString },
                    'SampleRateNumerator': { 'validator': int },
                    'SampleRateDenominator': { 'validator': int },
                    'Depth': { 'validator': float },
                    'Azimuth': { 'validator': float },
                    'Dip': { 'validator': float },
                    'Gain': { 'validator': float },
                    'GainFrequency': { 'validator': float },
                    'GainUnit': { 'validator': sc3.getString },
                    'Format': { 'validator': sc3.getString },
                    'Flags': { 'validator': sc3.getString },
                    'Restricted': { 'validator': sc3.getBool },
                    'Shared': { 'validator': sc3.getBool }
                }
            },
            'location': {
                'creator': seiscomp.datamodel.SensorLocation_Create,
                'attributes': {
                    'Code': { 'validator': sc3.getString },
                    'Start': { 'validator': sc3.getDate },
                    'End': { 'validator': sc3.getDate },
                    "Latitude": { 'validator': float },
                    "Longitude": { 'validator': float },
                    "Elevation": { 'validator': float }
                }
            },
            'station': {
                'creator': seiscomp.datamodel.Station_Create,
                'attributes': {
                    'Code': { 'validator': sc3.getString },
                    'Start': { 'validator': sc3.getDate },
                    'End': { 'validator': sc3.getDate },
                    'Description': { 'validator': sc3.getString },
                    'Latitude': { 'validator': float },
                    'Longitude': { 'validator': float },
                    'Elevation': { 'validator': float },
                    'Place': { 'validator': sc3.getString },
                    'Country': { 'validator': sc3.getString },
                    'Affiliation': { 'validator': sc3.getString },
                    'Type': { 'validator': sc3.getString },
                    'ArchiveNetworkCode': { 'validator': sc3.getString },
                    'Archive': { 'validator': sc3.getString },
                    'Restricted': { 'validator': sc3.getBool },
                    'Shared': { 'validator': sc3.getBool },
                    'Remark': { 'validator': sc3.getBlob }
                }
            },
            'network': {
                'creator': seiscomp.datamodel.Network_Create,
                'attributes': {
                    'Code': { 'validator': sc3.getString },
                    'Start': { 'validator': sc3.getDate },
                    'End': { 'validator': sc3.getDate },
                    'Description': { 'validator': sc3.getString },
                    'Institutions': { 'validator': sc3.getString },
                    'Region': { 'validator': sc3.getString },
                    'Type': { 'validator': sc3.getString },
                    'NetClass': { 'validator': sc3.getString },
                    'Archive': { 'validator': sc3.getString },
                    'Comment': { 'validator': sc3.getString },
                    'Pid': { 'validator': sc3.getBlob },
                    'Restricted': { 'validator': sc3.getBool },
                    'Shared': { 'validator': sc3.getBool },
                    'Remark': { 'validator': sc3.getBlob }
                }
            },
            'stationGroup': {
                'creator': seiscomp.datamodel.StationGroup_Create,
                'attributes': {
                    'Code': { 'validator': sc3.getString },
                    'Start': { 'validator': sc3.getDate },
                    'End': { 'validator': sc3.getDate },
                    'Description': { 'validator': sc3.getString },
                    'Type': { 'validator': sc3.getStationGroupType },
                    'Latitude': { 'validator': float },
                    'Longitude': { 'validator': float },
                    'Elevation': { 'validator': float },
                }
            },
            'stationReference': {
                'creator': seiscomp.datamodel.StationReference,
                'attributes': {
                    'StationID': { 'validator': sc3.getString },
                }
            },
            'datalogger': {
                'creator': seiscomp.datamodel.Datalogger_Create,
                'attributes': {
                    'Name': { 'validator': sc3.getString },
                    'Description': { 'validator': sc3.getString },
                    'DigitizerModel': { 'validator': sc3.getString },
                    'DigitizerManufacturer': { 'validator': sc3.getString },
                    'RecorderModel': { 'validator': sc3.getString },
                    'RecorderManufacturer': { 'validator': sc3.getString },
                    'ClockModel': { 'validator': sc3.getString },
                    'ClockManufacturer': { 'validator': sc3.getString },
                    'ClockType': { 'validator': sc3.getString },
                    'Gain': { 'validator': float },
                    'MaxClockDrift': { 'validator': float },
                    'Remark': { 'validator': sc3.getBlob }
                }
            },
            'decimation': {
                'creator': seiscomp.datamodel.Decimation,
                'attributes': {
                    'SampleRateNumerator': { 'validator': int },
                    'SampleRateDenominator': { 'validator': int },
                    'AnalogueFilterChain': { 'validator': sc3.getBlob },
                    'DigitalFilterChain': { 'validator': sc3.getBlob }
                }
            },
            'fir': {
                'creator': seiscomp.datamodel.ResponseFIR_Create,
                'attributes': {
                    "Name": { 'validator': sc3.getString },
                    "Gain": { 'validator': float },
                    "DecimationFactor": { 'validator': int },
                    "Delay": { 'validator': float },
                    "Correction": { 'validator': float },
                    "NumberOfCoefficients": { 'validator': int },
                    "Symmetry": { 'validator': sc3.getString },
                    "Coefficients": { 'validator': sc3.getRealArray },
                    "Remarks": { 'validator': sc3.getBlob }
                }
            },
            'paz': {
                'creator': seiscomp.datamodel.ResponsePAZ_Create,
                'attributes': {
                    'Name': { 'validator': sc3.getString },
                    'Description': { 'validator': sc3.getString },
                    'Type': { 'validator': sc3.getString },
                    'Gain': { 'validator': float },
                    'GainFrequency': { 'validator': float },
                    'NormalizationFactor': { 'validator': float },
                    'NormalizationFrequency': { 'validator': float },
                    'NumberOfZeros': { 'validator': int },
                    'NumberOfPoles': { 'validator': int },
                    'Zeros': { 'validator': sc3.getComplexArray },
                    'Poles': { 'validator': sc3.getComplexArray },
                    'Remark': { 'validator': sc3.getBlob }
                }
            },
            'sensor': {
                'creator': seiscomp.datamodel.Sensor_Create,
                'attributes': {
                    'Name': { 'validator': sc3.getString },
                    'Description': { 'validator': sc3.getString },
                    'Model': { 'validator': sc3.getString },
                    'Manufacturer': { 'validator': sc3.getString },
                    'Type': { 'validator': sc3.getString },
                    'Unit': { 'validator': sc3.getString },
                    'LowFrequency': { 'validator': float },
                    'HighFrequency': { 'validator': float },
                    'Response': { 'validator': sc3.getString },
                    'Remark': { 'validator': sc3.getBlob }
                }
            }
        }
        return(valid.get(mode))

    # NOTE(review): mutable default for ``child`` -- benign here because
    # the list is only read, never mutated, but worth tidying someday.
    def __init__(self, mode, child=[]):
        self.sc3Mode = mode           # type key into _findValidOnes()
        self.sc3obj = None            # lazily created datamodel object
        self.sc3Valid = sc3._findValidOnes(mode)
        self._sc3Childs = child       # nettab children to attach on build

    def _create(self):
        """Instantiate the datamodel object for this type."""
        if not self.sc3Valid:
            raise Exception("Class without a type defined.")
        return self.sc3Valid['creator']()

    def sc3Att(self):
        """Return the dictionary of attributes to set on the SC3 object.

        This is the heart of the mixin; subclasses must override it.
        The dictionary is consumed by the _fillSc3 method.
        """
        raise Exception("Not Implemented !")

    def sc3ValidKey(self, key):
        """Return True if *key* is a settable attribute for this type."""
        if not self.sc3Valid:
            raise Exception("Class without a type defined.")
        return (key in self.sc3Valid['attributes'])

    def sc3Resolv(self, inventory):
        """Resolve all references held by this object (override as needed)."""
        pass

    def sc3Derived(self, inventory):
        """Generate and collect the derived (child) SC3 objects.

        By default builds the SC3 object of every registered child and
        returns them as a list.
        """
        objs = []
        for obj in self._sc3Childs:
            objs.append(obj.sc3Obj(inventory))
        return objs

    def sc3ID(self, inventory):
        """Return the publicID of this object's SC3 counterpart."""
        obj = self.sc3Obj(inventory)
        return obj.publicID()

    def sc3Obj(self, inventory):
        """Build (once) and return the SC3 datamodel object."""
        if not self.sc3obj:
            # Get a new object
            obj = self._create()
            # try to resolve REFERENCES to PUBLIC ID
            self.sc3Resolv(inventory)
            # Add the derived objects in
            for dobj in self.sc3Derived(inventory):
                obj.add(dobj)
            # Fill the Attributes in
            self._fillSc3(obj, self.sc3Att())
            # # Only want to see Networks:
            # if (('Code' in self.sc3Att().keys())
            #     and ('ArchiveNetworkCode' not in self.sc3Att().keys())
            #     and ('Azimuth' not in self.sc3Att().keys())
            #     ):
            #     print('DEBUG basesc3.py: sc3Obj:', self, self.sc3Att())
            # Set as created
            self.sc3obj = obj
        # return the obj
        return self.sc3obj

View File

@ -0,0 +1,506 @@
from __future__ import print_function
import sys
import csv
import re
from datetime import datetime
def getFieldNames(fd):
    """Read one header line from *fd* and return its comma-separated,
    whitespace-stripped column names."""
    header = fd.readline()
    return [column.strip() for column in header.split(',')]
def quote(instr):
    """Return *instr* wrapped in double quotes."""
    return ''.join(['"', instr, '"'])
def hummanStr(instr):
    """Return a human-readable form of *instr*: underscores become spaces."""
    return " ".join(instr.split("_"))
def parseDate(val):
    """Parse a date string into a datetime, or return None for empty input.

    Accepts year/julian-day or year-month-day forms, with optional time
    part; '/' separators are treated as '-'.  The format is selected by
    the length of the normalized string.  Raises ValueError on failure.
    """
    if not val or val == "":
        return None
    date = val.replace("/", "-")
    # Candidate strptime formats, keyed by total string length.
    formats = {
        len("YYYY-JJJ"): "%Y-%j",
        len("YYYY-MM-DD"): "%Y-%m-%d",
        len("YYYY-JJJ:HHMM"): "%Y-%j:%H%M",
        len("YYYY-JJJTHH:MM"): "%Y-%jT%H:%M",
        len("YYYY-MM-DDTHH:MM"): "%Y-%m-%dT%H:%M",
        len("YYYY-JJJTHH:MM:SS"): "%Y-%jT%H:%M:%S",
        len("YYYY-MM-DDTHH:MM:SS"): "%Y-%m-%dT%H:%M:%S",
    }
    try:
        return datetime.strptime(date, formats[len(date)])
    except Exception as e:
        raise ValueError("invalid date: " + date + str(e))
def formatDate(date):
    """Format a datetime as 'YYYY/JJJ', appending ':HHMM' only when the
    time-of-day is non-zero.  None/empty input yields an empty string."""
    if not date:
        return ""
    fmt = "%Y/%j:%H%M" if (date.hour != 0 or date.minute != 0) else "%Y/%j"
    return date.strftime(fmt)
def isPyVersion(major, minor):
    """Return True when the running interpreter is exactly Python major.minor."""
    return (sys.version_info[0], sys.version_info[1]) == (major, minor)
class StationMappings:
    """Per-station archive-network mappings loaded from a mapping file.

    Each mapping line has the form ``STA_NET=ARCHIVE [from=YYYY/JJJ]
    [to=YYYY/JJJ]``; only lines matching *networkCode* and a station in
    *stationList* are kept.  The mappings are later used to split
    station epochs (stationBreak) across archive networks.
    """

    def __init__(self, networkCode, stationList, filename):
        """Load mappings for *networkCode*/*stationList* from *filename*.

        A falsy *filename* leaves the object empty.  Raises on any
        malformed line, reporting file name and line number.
        """
        self.networkCode = networkCode
        self.stationList = stationList
        self.stationMapping = {}
        self.stationBreak = {}
        if not filename:
            return
        _rx_statmap = re.compile(r'\s*([^_]*)_([^=]*)=(\S*)\s*(from=([0-9]+/[0-9]+))?\s*(to=([0-9]+/[0-9]+))?\s*$')
        stationMapping = {}
        lineno = 0
        fd = open(filename)
        try:
            try:
                # enumerate() fixes the original's line counter, which was
                # not advanced on the 'continue' paths.
                for lineno, line in enumerate(fd, 1):
                    m = _rx_statmap.match(line)
                    if m is None:
                        raise Exception("parse error")
                    (sta, net, archive_net, from_def, from_year,
                     to_def, to_year) = m.groups()
                    # Only our network and known stations are relevant.
                    if net != self.networkCode:
                        continue
                    if sta not in self.stationList:
                        continue
                    from_date = parseDate(from_year) if from_def else None
                    to_date = parseDate(to_year) if to_def else None
                    stationMapping.setdefault(sta, []).append(
                        (from_date, to_date, archive_net))
            except Exception as e:
                # Bug fix: the original interpolated the builtin ``file``
                # instead of the ``filename`` argument into this message.
                raise Exception("%s:%d: %s" % (filename, lineno, str(e)))
        finally:
            fd.close()
        if len(stationMapping):
            print("Found %d station mappings" % len(stationMapping), file=sys.stderr)
            self.stationMapping = stationMapping

    def dump(self, fdo, stationCode):
        """Write 'Sa: ArchiveNetworkCode=...' lines for *stationCode*
        (or for all stations when falsy) to *fdo*, then drop the dumped
        entries from the mapping."""
        items = []
        for (code, mapping) in self.stationMapping.items():
            if stationCode and stationCode != code:
                continue
            items.append(code)
            for (fromDate, toDate, network) in mapping:
                fdo.write("Sa: ArchiveNetworkCode=%s %s" % (network, code))
                if fromDate:
                    fdo.write(" from=%s" % formatDate(fromDate))
                if toDate:
                    fdo.write(" to=%s" % formatDate(toDate))
                fdo.write("\n")
        for code in items:
            self.stationMapping.pop(code)

    def getMappings(self, code, start, end):
        """Return the [start, end] spans for a station line.

        Without a recorded break this is the single original span;
        otherwise one span per stationBreak segment.
        """
        breaks = self.stationBreak.get((code, start, end))
        if breaks is None:
            return [[start, end]]
        return [[s, e] for (archiveNet, s, e, fr, to) in breaks]

    def parseStationLine(self, items):
        """Apply mappings to one station line and return its epoch spans.

        *items* is a split station line (code at [0], start at [10],
        optional end at [11]).  Records any required epoch break in
        self.stationBreak, then returns getMappings() for the line.
        """
        stationCode = items[0].strip()
        start = parseDate(items[10])
        end = parseDate(items[11]) if len(items) > 11 else None
        key = (stationCode, start, end)
        if stationCode not in self.stationMapping:
            return self.getMappings(stationCode, start, end)
        for (fDate, tDate, archiveNet) in self.stationMapping[stationCode]:
            if fDate and tDate:
                raise Exception("Not Supported to and from definitions found.")
            elif fDate:
                # 'from' mapping: epoch is split at fDate when it falls
                # inside [start, end].
                if fDate >= start:
                    if (end and fDate <= end) or not end:
                        if key in self.stationBreak:
                            raise Exception("Crazy multiple station mapping for the same station line")
                        self.stationBreak[key] = [
                            (self.networkCode, start, fDate, fDate, tDate),
                            (archiveNet, fDate, end, fDate, tDate),
                        ]
                        return self.getMappings(stationCode, start, end)
            elif tDate:
                # 'to' mapping: archive network owns the epoch up to tDate.
                if tDate >= start:
                    if (end and tDate <= end) or not end:
                        if key in self.stationBreak:
                            raise Exception("Crazy multiple station mapping for the same station line")
                        self.stationBreak[key] = [
                            (archiveNet, start, tDate, fDate, tDate),
                            (self.networkCode, tDate, end, fDate, tDate),
                        ]
                        return self.getMappings(stationCode, start, end)
            else:
                # Unbounded mapping: the whole epoch moves to archiveNet.
                if key in self.stationBreak:
                    raise Exception("Crazy multiple station mapping for the same station line")
                self.stationBreak[key] = [
                    (archiveNet, start, end, fDate, tDate),
                ]
                return self.getMappings(stationCode, start, end)
        return self.getMappings(stationCode, start, end)
class StationAttributes:
    """Per-station attribute overrides loaded from a CSV file.

    Rows are matched against *networkCode*/*stationList* and normalized
    into SC3-style attribute names (Place, Country, Affiliation, Remark,
    Restricted).  Missing attributes can be derived from the station
    description line.
    """

    def __init__(self, networkCode, stationList, filename):
        # CSV columns expected: net_code, sta_code, start, restricted,
        # restricted_exc, place, country, affiliation, remark.
        self.networkCode= networkCode
        self.stationList = stationList
        self.stationAttributeList = {}
        if not filename: return
        fd = open(filename)
        attributes = {}
        try:
            try:
                fieldNames = None
                # Python 2.3's csv module could not take fieldnames from
                # the file itself.
                if isPyVersion(2, 3):
                    fieldNames = getFieldNames(fd)
                for row in csv.DictReader(fd, fieldNames):
                    net_code = row['net_code']
                    if net_code != self.networkCode: continue
                    sta_code = row['sta_code']
                    if sta_code not in self.stationList: continue
                    start = parseDate(row['start'].strip())
                    if sta_code in attributes:
                        raise Exception("multiple %s found in %s" % (str((net_code, sta_code, row['start'])), filename))
                    del row['net_code']
                    del row['sta_code']
                    del row['start']
                    ## Clean up input: strip values and drop empties.
                    for key in ['restricted', 'restricted_exc', 'place', 'country', 'affiliation', 'remark']:
                        row[key] = row[key].strip()
                        if len(row[key]) == 0:
                            del row[key]
                    # 'restricted' is kept only when truthy.
                    if 'restricted' in row:
                        row['restricted'] = bool(int(row['restricted']))
                        if not row['restricted']: del (row['restricted'])
                    if row:
                        attributes[sta_code] = row
            except KeyError as e:
                raise Exception("column %s missing in %s" % (str(e), filename))
            except (TypeError, ValueError) as e:
                raise Exception("error reading %s: %s" % (filename, str(e)))
        finally:
            fd.close()
        self.stationAttributeList = self.__build__(attributes)
        print(" loaded attributes for %d stations on network %s (%s)" % (len(self.stationAttributeList), self.networkCode, filename), file=sys.stderr)

    def __build__(self, attributes):
        """Rename CSV column keys to SC3 attribute names; drop empty rows."""
        newat = {}
        if not attributes:
            ## no station attributes found for this network
            return newat
        for (code,row) in attributes.items():
            nr = {}
            for (k,v) in row.items():
                if k == 'country': k = 'Country'
                if k == 'place': k = 'Place'
                if k == 'affiliation': k = 'Affiliation'
                if k == 'remark': k = 'Remark'
                if k == 'restricted': k = 'Restricted'
                nr[k] = v
            if nr:
                newat[code] = nr
        return newat

    def get(self, code):
        """Return the attribute dict for station *code*, or None."""
        if self.stationAttributeList and code in self.stationAttributeList:
            return self.stationAttributeList[code]
        else:
            return None

    def __parseDescription__(self, description):
        """Derive Affiliation/Place/Country from a free-text description.

        Expected shape: '<Affiliation> Station <Place>, <Country>'; the
        'Station' marker and the country part are both optional.
        """
        affiliation = None
        place = None
        country = None
        description = hummanStr(description)
        hasStation = True if description.find("Station") >= 0 else False
        if hasStation:
            affiliation = description[0:(description.index("Station"))].strip()
            parts = description[description.index("Station")+7:].strip().split(",")
        else:
            parts = description.split(",")
        if len(parts) > 1:
            # Last comma-separated token is the country.
            country = parts[len(parts)-1].strip()
            parts = parts[0:(len(parts)-1)]
            place = ",".join(parts)
        else:
            place = ",".join(parts)
        # print("Country:", country, file=sys.stderr)
        # print("Place:", place, file=sys.stderr)
        # print("Affiliation:", affiliation, file=sys.stderr)
        oui = {}
        if country:
            oui['Country'] = country
        if place:
            oui['Place'] = place
        if affiliation:
            oui['Affiliation'] = affiliation
        return oui

    def reorder_station_attr(self):
        """Invert the station->attributes map into attr -> value -> [codes].

        'restricted_exc' entries are folded into 'Restricted' with an
        exception-pattern suffix appended to the station code.
        """
        att = {}
        if not self.stationAttributeList:
            return None
        for (code, row) in self.stationAttributeList.items():
            for (k, v) in row.items():
                if k == 'restricted_exc':
                    k = 'Restricted'
                    extra=',*,'+str(v)
                    # The exception flips the station's Restricted value.
                    v = (not row['Restricted']) if 'Restricted' in row else True
                else:
                    extra= ''
                try:
                    dk = att[k]
                except:
                    dk = {}
                    att[k] = dk
                try:
                    dv = dk[str(v)]
                except:
                    dv = []
                    dk[str(v)] = dv
                dv.append(code+extra)
        return att

    def parseStationLine(self, items, fStart = None, fEnd = None):
        """Resolve Place/Country for one station line.

        Attributes missing from the loaded list are derived from the
        description field; returns [place, country] (either may be None).
        """
        stationCode = items[0].strip()
        description = items[1]
        start = parseDate(items[10])
        if stationCode not in self.stationList:
            raise Exception("Station %s not in station list." % stationCode)
        ## Here we can force different start & end values for the line.
        # NOTE(review): ``end`` is assigned but never used below.
        if fStart is not None:
            start = fStart
        if fEnd is not None:
            end = fEnd
        oui = None
        at = self.get(stationCode)
        if not at:
            ## No stored attributes: derive everything from description.
            at = self.__parseDescription__(description)
            if at:
                self.stationAttributeList[stationCode] = at
        else:
            # Fill only the missing keys from the description.
            for item in ['Affiliation', 'Country', 'Place']:
                if item in at:
                    continue
                if not oui:
                    oui = self.__parseDescription__(description)
                if item in oui:
                    at[item] = oui[item]
                else:
                    ## Attribute not derivable for this station.
                    pass
        country = at['Country'] if 'Country' in at else None
        place = at['Place'] if 'Place' in at else None
        return [place, country]

    def dump(self, fdo, code):
        """Write 'Sa:' attribute lines to *fdo*.

        With a falsy *code* dumps the inverted attribute table for all
        stations; otherwise dumps one station's attributes once (a
        'done' marker prevents duplicates).
        """
        if not code:
            att = self.reorder_station_attr()
            for (key,v) in att.items():
                if key in ['Country', 'Place']: continue
                for (value, s) in v.items():
                    fdo.write("Sa: %s=%s" % (key, quote(value)))
                    for station in s:
                        fdo.write(" %s" % (station))
                    fdo.write("\n")
        else:
            at = self.get(code)
            if not at: return
            if 'done' in at: return
            at['done'] = 1 # Mark the item as printed
            for (k,v) in at.items():
                extra = ''
                if k in [ 'done', 'Place', 'Country']: continue
                if k in ['Affiliation']: v = quote(v)
                if k == 'Restricted':
                    extra = ' %s,*,*' % code
                if k == 'restricted_exc':
                    k = 'Restricted'
                    extra=',*,'+str(v)
                    v = (not at['Restricted']) if 'Restricted' in at else True
                fdo.write("Sa: %s=%s %s%s\n" % (k,v,code,extra))
class NetworkAttributes:
    """Network-level attributes loaded from a CSV file.

    Reads the first row matching *networkCode* and exposes the cleaned
    values via self.networkAttributes plus start/end epoch markers.
    """

    def __build__(self, row):
        # CSV columns: net_code,start,end,restricted,shared,net_class,
        # type,institutions,region,remark
        attList = {}
        if row['start']:
            self.start = row['start'].strftime("%Y/%j")
            self.startDate = row['start']
            self.hasStart = True
        if row['end']:
            self.end = row['end'].strftime("%Y/%j")
            self.endDate = row['end']
            self.hasEnd = True
        # Only non-default flags are recorded (Restricted defaults to
        # False, Shared defaults to True).
        if row['restricted'] != 0:
            attList['Restricted'] = row['restricted']
        if row['shared'] != 1:
            attList['Shared'] = row['shared']
        if row['net_class']:
            attList['NetClass'] = row['net_class'].strip()
        if row['type']:
            attList['Type'] = row['type'].strip()
        if row['institutions']:
            attList['Institutions'] = row['institutions'].strip()
        if row['region']:
            attList['Region'] = row['region'].strip()
        if row['remark']:
            attList['Remark'] = row['remark'].strip()
        self.networkAttributes.update(attList)

    def parseNetworkLine(self, items):
        """Build the network Description from a split network line.

        items[0] is the description text, items[1] an optional suffix
        ('none' means no suffix).
        """
        if len(items) < 4 or len(items) > 6:
            raise Exception("Invalid network line")
        attList = {}
        if items[1] == "none":
            attList['Description'] = hummanStr(items[0])
        else:
            attList['Description'] = "%s (%s)" % (hummanStr(items[0]), items[1])
        self.networkAttributes.update(attList)

    def dump(self, fdo):
        """Write 'Na:' attribute lines to *fdo*, quoting text values."""
        for (k,v) in self.networkAttributes.items():
            if k in ['Description', 'Remark', 'Region', 'Institutions']:
                v = quote(v)
            fdo.write("Na: %s=%s\n" % (k,v))

    def __init__(self, networkCode, filename):
        """Load attributes for *networkCode* from CSV *filename*.

        A falsy *filename* leaves the object empty.  Only the first
        matching row is used.
        """
        self.networkCode = networkCode
        self.networkAttributes = {}
        self.start = None
        self.end = None
        self.hasStart = False
        self.hasEnd = False
        if not filename: return
        fd = open(filename)
        try:
            try:
                fieldNames = None
                # Python 2.3's csv module could not take fieldnames from
                # the file itself.
                if isPyVersion(2, 3):
                    fieldNames = getFieldNames(fd)
                for row in csv.DictReader(fd, fieldNames):
                    net_code = row['net_code']
                    if net_code != self.networkCode: continue
                    #del row['net_code']
                    #del row['start']
                    row['start'] = parseDate(row['start'])
                    row['end'] = parseDate(row['end'])
                    row['restricted'] = bool(int(row['restricted']))
                    row['shared'] = bool(int(row['shared']))
                    row['region'] = row['region'].strip()
                    row['remark'] = row['remark'].strip()
                    row['institutions'] = row['institutions'].strip()
                    self.__build__(row)
                    break
            except KeyError as e:
                raise Exception("column %s missing in %s" % (str(e), filename))
            except (TypeError, ValueError) as e:
                raise Exception("error reading %s: %s" % (filename, str(e)))
        finally:
            fd.close()
        print(" found %d Attribute for network %s (%s)" % (len(self.networkAttributes), self.networkCode, filename), file=sys.stderr)

View File

@ -0,0 +1,160 @@
import re
from datetime import datetime
import string
from functools import reduce
class parsers(object):
    """Stateless field parsers/validators for nettab input files.

    Each method either returns the normalized value or raises on
    invalid input.
    """

    @staticmethod
    def parseString(val):
        """Return *val* stripped of surrounding whitespace."""
        return val.strip()

    @staticmethod
    def _parse_paz(npaz, s):
        """Parse a poles/zeros spec of the form 'N(re,im)(re,im)...'.

        Returns a list of (re, im) string pairs, each repeated N times.
        Raises when the text is malformed or the total count differs
        from *npaz*.
        """
        _rx_paz = re.compile(r'\s*([0-9]*)\(\s*([^,]+),\s*([^)]+)\)\s*')
        pos = 0
        n = 0
        c = []
        while pos < len(s):
            m = _rx_paz.match(s, pos)
            if m is None:
                raise Exception("error parsing PAZ at '" + s[pos:] + "'")
            try:
                # Optional repeat count; defaults to 1.
                if len(m.group(1)) > 0:
                    x = int(m.group(1))
                else:
                    x = 1
                rv = m.group(2)
                iv = m.group(3)
                # Validate both components are numeric but keep them as
                # strings for the caller.
                float(rv)
                float(iv)
            except ValueError:
                raise Exception("error parsing PAZ at '" + s[pos:] + "'")
            # (removed the original's dead loop variable `i = i`)
            c.extend([(rv, iv)] * x)
            n += x
            pos = m.end()
        if n != npaz:
            raise Exception("expected %d PAZ, found %d" % (npaz, n))
        return c

    @staticmethod
    def _normalize(num, denom):
        """Reduce num/denom by their greatest common divisor."""
        if num > denom:
            (a, b) = (num, denom)
        else:
            (a, b) = (denom, num)
        while b > 1:
            (a, b) = (b, a % b)
            if b == 0:
                # Bug fix: floor division keeps the result integral
                # under Python 3 (plain '/' produced floats).
                return (num // a, denom // a)
        return (num, denom)

    @staticmethod
    def _rational(x):
        """Convert a Decimal *x* into a reduced (numerator, denominator)."""
        sign, mantissa, exponent = x.as_tuple()
        sign = (1, -1)[sign]
        mantissa = sign * reduce(lambda a, b: 10 * a + b, mantissa)
        if exponent < 0:
            return parsers._normalize(mantissa, 10 ** (-exponent))
        else:
            return (mantissa * 10 ** exponent, 1)

    @staticmethod
    def _parseFloat(val, mi=None, ma=None):
        """Parse a float, optionally enforcing inclusive bounds.

        Bounds are checked with 'is not None' -- the original truthiness
        test silently disabled a bound of 0.0 (e.g. negative gains
        passed parseGain).
        """
        number = float(val)
        if (mi is not None and number < mi) or (ma is not None and number > ma):
            raise Exception("Invalid Range")
        return number

    @staticmethod
    def parseGain(val):
        """Parse a non-negative gain value."""
        try:
            return parsers._parseFloat(val, 0.0, None)
        except Exception as e:
            raise Exception("Invalid Gain: %s" % e)

    @staticmethod
    def parseLongitude(val):
        """Parse a longitude in [-180, 180] degrees."""
        try:
            return parsers._parseFloat(val, -180.0, 180.0)
        except Exception as e:
            raise Exception("Invalid Longitude: %s" % e)

    @staticmethod
    def parseLatitude(val):
        """Parse a latitude in [-90, 90] degrees."""
        try:
            return parsers._parseFloat(val, -90.0, 90.0)
        except Exception as e:
            raise Exception("Invalid Latitude: %s" % e)

    @staticmethod
    def parseDepth(val):
        """Parse a sensor depth in [0, 5000] m (deepest mine ~5000 m)."""
        try:
            return parsers._parseFloat(val, 0.0, 5000)
        except Exception as e:
            raise Exception("Invalid Depth: %s" % e)

    @staticmethod
    def parseElevation(val):
        """Parse an elevation in [-11000, 9000] m.

        Highest Everest ~8500 m; deepest Mariana ~11000 m.
        """
        try:
            return parsers._parseFloat(val, -11000, 9000)
        except Exception as e:
            raise Exception("Invalid Elevation: %s" % e)

    @staticmethod
    def parseDate(val):
        """Parse a date string into a datetime.

        Accepts year/julian-day or year-month-day forms with optional
        time part; '/' is normalized to '-' and the strptime format is
        chosen by string length.  Raises ValueError on failure.
        """
        date = val.replace("/", "-")
        formats = {
            len("YYYY-JJJ"): "%Y-%j",
            len("YYYY-MM-DD"): "%Y-%m-%d",
            len("YYYY-JJJ:HHMM"): "%Y-%j:%H%M",
            len("YYYY-JJJTHH:MM"): "%Y-%jT%H:%M",
            len("YYYY-MM-DDTHH:MM"): "%Y-%m-%dT%H:%M",
            len("YYYY-JJJTHH:MM:SS"): "%Y-%jT%H:%M:%S",
            len("YYYY-MM-DDTHH:MM:SS"): "%Y-%m-%dT%H:%M:%S",
        }
        try:
            return datetime.strptime(date, formats[len(date)])
        except Exception as e:
            raise ValueError("invalid date: " + date + str(e))

    @staticmethod
    def parseLocationCode(val):
        """Validate a location code: up to 2 chars of A-Z0-9-*? (may be empty)."""
        Code = val.strip()
        if len(Code) > 2 or len(re.sub("[A-Z0-9-*?]", "", Code)) > 0:
            raise Exception("wrong code for location: %s" % Code)
        return Code

    @staticmethod
    def parseStationCode(val):
        """Validate a non-empty station code: up to 5 chars of A-Z0-9*?."""
        Code = val.strip()
        if not Code or len(Code) > 5 or len(re.sub("[A-Z0-9*?]", "", Code)) > 0:
            raise Exception("Wrong code for station: %s" % Code)
        return Code

    @staticmethod
    def parseChannelCode(val):
        """Validate a non-empty channel code: up to 3 chars of A-Z0-9*?."""
        Code = val.strip()
        if not Code or len(Code) > 3 or len(re.sub("[A-Z0-9*?]", "", Code)) > 0:
            raise Exception("Wrong code for channel: %s" % Code)
        return Code

    @staticmethod
    def parseNetworkCode(val):
        """Validate a non-empty network code: up to 2 chars of A-Z0-9*?."""
        Code = val.strip()
        if not Code or len(Code) > 2 or len(re.sub("[A-Z0-9*?]", "", Code)) > 0:
            raise Exception("Wrong code for network: %s" % Code)
        return Code

File diff suppressed because it is too large Load Diff

1645
lib/python/nettab/nettab.py Normal file

File diff suppressed because it is too large Load Diff

523
lib/python/nettab/nodesi.py Normal file
View File

@ -0,0 +1,523 @@
from __future__ import print_function
from .lineType import Dl, Se, Ff, Pz, Cl
from .basesc3 import sc3
import sys
class prefixable(object):
    """Mixin for objects whose ``id`` can carry a namespace prefix."""

    def adjust(self, prefix):
        """Prepend '<prefix>:' to self.id; a falsy prefix is a no-op."""
        if not prefix:
            return
        self.id = "{0}:{1}".format(prefix, self.id)
class Instruments(object):
def __init__(self, prefix=""):
self.keys = []
self.ses = {}
self.dls = {}
self.fls = {}
self.cls = {}
self._sensors = {}
self._datalogger = {}
self._filters = {}
self._Cal = {}
self._prefix = prefix
def sc3Objs(self):
objs = []
for s in list(self._sensors.values()):
objs.append(s.sc3Obj(self))
for s in list(self._datalogger.values()):
objs.append(s.sc3Obj(self))
for s in list(self._filters.values()):
objs.append(s.sc3Obj(self))
return objs
def add(self, obj):
where = None
if isinstance(obj, Se):
where = self.ses
elif isinstance(obj, Dl):
where = self.dls
elif isinstance(obj, Cl):
where = self.cls
elif isinstance(obj, Ff) or isinstance(obj, Pz):
where = self.fls
else:
raise Exception("Object type %s doesn't fir this class" % type(obj))
if obj.id in self.keys:
raise Exception("Object id %s already exist." % (obj))
self.keys.append(obj.id)
where[obj.id] = obj
return
def instrumentId(self, iid, gain):
if gain is None:
if iid in self.dls:
gain = self.dls[iid].gain
elif iid in self.ses:
gain = self.ses[iid].gain
else:
raise Exception("Instrument iid not found")
siid = "%s/g=%s" % (iid, int(float(gain)))
return siid
def loadDataloggerCalibrations(self, dsm, dsn, dch, dsg, start, end, dd):
cls = []
for cl in self.cls.values():
if cl.type != "L": continue
if cl.match(dsm, dsn):
cls.append(Calibration(cl, dch, start, end))
if len(cls) == 0:
if dsn in self.cls:
print("[%s] No calibrations found for serial number %s and model %s " % (dsm, dsn, dsm), file=sys.stderr)
return
diid = self.instrumentId(dsm, dsg)
try:
datalogger = self._datalogger[diid].sc3Obj(self)
if dd != datalogger.publicID():
raise Exception("Public Id doesn't match")
except:
raise Exception("[%s] Could not retrieve datalogger %s" % (dsm, diid))
for cl in cls:
if (dsm, dsn, dch, start, end) in self._Cal:
## print >> sys.stderr,"[%s] Skiping calibration channel %s" % (dsm, cl.channel)
continue
## print >> sys.stderr,"[%s] Adding calibration %s (%s)" % (dsm, cl.channel, dd)
datalogger.add(cl.sc3Obj(self))
self._Cal[(dsm, dsn, dch, start, end)] = cl
def loadSensorCalibrations(self, ssm, ssn, sch, ssg, start, end, ss):
cls = []
for cl in self.cls.values():
if cl.type != "S": continue
if cl.match(ssm, ssn):
cls.append(Calibration(cl, sch, start, end))
if len(cls) == 0:
if ssn in self.cls:
print("[%s] No calibrations found for serial number %s and model %s " % (ssm,ssn, ssm), file=sys.stderr)
return
siid = self.instrumentId(ssm, ssg)
try:
sensor = self._sensors[siid].sc3Obj(self)
if ss != sensor.publicID():
raise Exception("Public Id doesn't match")
except:
raise Exception("[%s] Could not retrieve sensor %s" % (ssm, siid))
for cl in cls:
if (ssm, ssn, sch, start, end) in self._Cal:
## print >> sys.stderr,"[%s] Skiping calibration channel %s" % (ssm, cl.channel)
continue
## print >> sys.stderr,"[%s] Adding calibration %s channel %s start %s" % (ssm, ssn, cl.channel, start)
sensor.add(cl.sc3Obj(self))
self._Cal[(ssm, ssn, sch, start, end)] = cl
def check(self, networks):
error = []
# Dataloggers check
error.append("* Dataloggers:")
for dl in self.dls.values():
error.extend(dl.check(self))
error.append("")
# Check fir filters
error.append("* Filters:")
for f in self.fls.values():
c = False
for dl in self.dls.values():
c = c or dl.use(f)
if c: break
if not c: error.append(" [%s] filter is not used" % f.id)
error.append("")
# Check the calibrations
error.append("* Calibrations:")
for cl in self.cls.values():
error.extend(cl.check(self))
error.append("")
error.append("* Sensors:")
for f in self.ses.values():
c = False
for network in networks.values():
for station in network.stations:
for location in station.locations:
for channel in location.channels:
c = c or channel.use(f)
if c: break
if c: break
if c: break
if c: break
if not c: error.append(" [%s] sensor is not used" % f.id)
error.append("")
error.append("* Dataloggers:")
for f in self.dls.values():
c = False
for network in networks.values():
c = c or network.use(f)
if c: break
if not c: error.append(" [%s] datalogger is not used" % f.id)
error.append("")
return error
def filterType(self, iid):
if iid not in self.keys:
raise Exception("[%s] Filter id not found" % iid)
if iid not in self.fls:
raise Exception("[%s] Object is not a filter" % iid)
obj = self.fls[iid]
if isinstance(obj, Ff):
fType = 'D'
elif isinstance(obj, Pz):
fType = obj.type
return fType
def filterID(self, iid):
    """Return the sc3 publicID of filter *iid*, creating and caching the
    sc3 wrapper (Paz for poles&zeros, Fir for FIR) on first use.

    Raises Exception when iid is unknown, is not a filter, maps to an
    unsupported class, or the created wrapper has an inconsistent id.
    """
    if iid not in self.keys:
        raise Exception("[%s] Filter id not found" % iid)
    if iid not in self.fls:
        raise Exception("[%s] Object is not a filter" % iid)
    if iid not in self._filters:
        obj = self.fls[iid]
        if isinstance(obj, Pz):
            newFilter = Paz(obj)
        elif isinstance(obj, Ff):
            newFilter = Fir(obj)
        else:
            # Previously an unexpected class fell through to an
            # UnboundLocalError on newFilter; raise explicitly.
            raise Exception("[%s] Unsupported filter class %s" % (iid, type(obj).__name__))
        newFilter.adjust(self._prefix)
        if newFilter.id != self.prefix(iid):
            raise Exception("Invalid filter created %s" % (iid))
        self._filters[iid] = newFilter
    return self._filters[iid].sc3ID(self)
def prefix(self, iid):
    """Return *iid* qualified with the instrument prefix, when one is set."""
    return "%s:%s" % (self._prefix, iid) if self._prefix else iid
def dataloggerID(self, iid, gain = None):
    """Return the sc3 publicID of datalogger *iid* at *gain*, creating and
    caching a gain-specific Dataloger wrapper on first request."""
    if iid not in self.keys:
        raise Exception("Object not found.")
    if iid not in self.dls:
        raise Exception("[%s] Object is not a datalogger" % iid)
    diid = self.instrumentId(iid, gain)
    if diid in self._datalogger:
        return self._datalogger[diid].sc3ID(self)
    # First request for this (id, gain) combination: build and register it.
    instance = Dataloger(self.dls[iid], gain)
    instance.adjust(self._prefix)
    if instance.id != self.prefix(diid):
        raise Exception("Invalid datalogger created %s %s" % (iid, diid))
    self._datalogger[diid] = instance
    return instance.sc3ID(self)
def sensorID(self, iid, gain = None):
    """Return the sc3 publicID of sensor *iid* at *gain*, creating and
    caching a gain-specific Sensor wrapper on first request."""
    if iid not in self.keys:
        raise Exception("Object not found.")
    if iid not in self.ses:
        raise Exception("[%s] Object is not a sensor" % iid)
    diid = self.instrumentId(iid, gain)
    if diid in self._sensors:
        return self._sensors[diid].sc3ID(self)
    # First request for this (id, gain) combination: build and register it.
    instance = Sensor(self.ses[iid], gain)
    instance.adjust(self._prefix)
    if instance.id != self.prefix(diid):
        raise Exception("Invalid sensor created %s %s" % (iid, diid))
    self._sensors[diid] = instance
    return instance.sc3ID(self)
def _findObject(self, objID, where):
    """Return the sc3 object in mapping *where* whose publicID is *objID*.

    Raises Exception when no object matches.  The previous implementation
    kept the last inspected object bound after an unsuccessful scan and
    returned it (the "not found" branch only fired for an empty mapping),
    silently handing back the wrong object.
    """
    for ob in where.values():
        obj = ob.sc3Obj(self)
        if obj.publicID() == objID:
            return obj
    raise Exception("Object not found: %s " % objID)
def _findCallibration(self, obj, count, serialNumber, channel, start):
    """Return the gain of the calibration matching serialNumber/channel.

    *obj* is an index-based accessor (e.g. sensor.sensorCalibration) and
    *count* the number of entries.  Returns None when serialNumber or
    channel is unset, or when nothing matches.  The *start* argument is
    currently not consulted.
    """
    if serialNumber is None or channel is None:
        return None
    for index in range(count):
        cal = obj(index)
        if cal.serialNumber() == serialNumber and cal.channel() == channel:
            return cal.gain()
    return None
def _sensorGain(self, seID, serialNumber, channel, start):
    """Resolve the (gain, gainFrequency, gainUnit) triplet for a sensor.

    seID is the sensor publicID; a calibration matching serialNumber and
    channel overrides the gain taken from the sensor's response filter.
    """
    sensor = self._findObject(seID, self._sensors)
    if not sensor:
        raise Exception("Not found %s" % seID)
    # The sensor's response filter supplies the reference gain/frequency.
    sensorFilter = self._findObject(sensor.response(), self._filters)
    if not sensorFilter:
        raise Exception("Not found %s" % seID)
    gainFrequency = sensorFilter.gainFrequency()
    try:
        gainUnit = sensor.unit()
    except:
        # presumably unit() raises when the optional attribute is unset
        # -- TODO confirm against the seiscomp datamodel API
        print("[%s] No gain unit supplied" % seID, file=sys.stderr)
        gainUnit = None
    # A matching calibration wins over the filter gain.
    gain = self._findCallibration(sensor.sensorCalibration, sensor.sensorCalibrationCount(), serialNumber, channel, start)
    if gain is not None:
        ## print >> sys.stderr,'[%s] Using sensor gain from calibration %s' % (serialNumber, gain)
        pass
    else:
        gain = sensorFilter.gain()
    return (gain, gainFrequency, gainUnit)
def _dataloggerGain(self, dtID, serialNumber, channel, Numerator, Denominator, start):
    """Compute the total datalogger gain for one sample-rate decimation.

    The base gain (calibration override when serialNumber/channel match,
    else the datalogger gain) is multiplied by the gain of every stage in
    the analogue and digital filter chains of the decimation whose rate
    equals Numerator/Denominator.
    """
    datalogger = self._findObject(dtID, self._datalogger)
    gain = self._findCallibration(datalogger.dataloggerCalibration, datalogger.dataloggerCalibrationCount(), serialNumber, channel, start)
    if gain is not None:
        ##print >> sys.stderr,'[%s] Using datalogger gain from calibration %s' % (serialNumber, gain)
        pass
    else:
        gain = datalogger.gain()
    # Scan for the decimation with the requested sample rate; the trailing
    # "decimation = None" resets the candidate whenever the check fails,
    # so a non-None value after the loop means a real match.
    decimation = None
    for i in range(0,datalogger.decimationCount()):
        decimation = datalogger.decimation(i)
        if decimation.sampleRateNumerator() == Numerator and decimation.sampleRateDenominator() == Denominator:
            break
        decimation = None
    if not decimation:
        raise Exception("Decimation not found %s/%s" % (Numerator, Denominator))
    # Filter chains are stored as blobs of whitespace-separated publicIDs.
    af = decimation.analogueFilterChain().content().split()
    df = decimation.digitalFilterChain().content().split()
    for fiID in af:
        g = self._findObject(fiID, self._filters).gain()
        #print >> sys.stderr,"Multiplying by %s %s" % (fiID, g)
        gain = gain * g
    for fiID in df:
        g = self._findObject(fiID, self._filters).gain()
        #print >> sys.stderr,"Multiplying by %s %s" % (fiID, g)
        gain = gain * g
    return gain
def getChannelGainAttribute(self, dtID, seID, dtSerialNumber, seSerialNumber, dtChannel, seChannel, Numerator, Denominator, channelStart):
    """Build the Gain/GainFrequency/GainUnit attribute dict for a channel
    by combining the resolved sensor and datalogger gains."""
    if not dtID or not seID:
        raise Exception("Empty instruments ID supplied.")
    (sensorGain, sensorFrequency, sensorUnit) = self._sensorGain(seID, seSerialNumber, seChannel, channelStart)
    dataloggerGain = self._dataloggerGain(dtID, dtSerialNumber, dtChannel, Numerator, Denominator, channelStart)
    att = {'Gain': sensorGain * dataloggerGain}
    if sensorFrequency is not None:
        att['GainFrequency'] = sensorFrequency
    if sensorUnit is not None:
        att['GainUnit'] = sensorUnit
    return att
class Paz(sc3, prefixable):
    """sc3 wrapper around a poles & zeros (Pz) line."""
    def __init__(self, pz):
        sc3.__init__(self, 'paz')
        self.id = pz.id
        self.att = pz.getAttributes()
    def sc3Att(self):
        """Return the sc3-valid attributes, warning about anything dropped."""
        att = {'Name': self.id}
        for (key, value) in self.att.items():
            if self.sc3ValidKey(key) and key not in att:
                att[key] = value
            else:
                print(" [%s] [%s] Ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key, value), file=sys.stderr)
        return att
class Sensor(sc3, prefixable):
    """sc3 sensor node built from an Se line, specialised for one gain."""
    def __init__(self, se, gain = None):
        sc3.__init__(self, 'sensor')
        self.baseid = se.id
        self.att = se.getAttributes()
        # Each distinct gain yields its own poles&zeros set and therefore
        # its own gain-qualified sensor id.
        self.pz = se.generatePz(gain)
        self.id = "%s/g=%s" % (self.baseid, int(float(self.pz.gain)))
    def sc3Resolv(self, inventory):
        # Re-use an already registered pole-zero filter when possible;
        # filterID raises for an unknown id, in which case we register
        # ours first and resolve again.
        try:
            self.att['Response'] = inventory.filterID(self.pz.id)
            ## print >> sys.stderr,"Re-used a sensor pole-zero"
        except:
            inventory.add(self.pz)
            self.att['Response'] = inventory.filterID(self.pz.id)
    def sc3Att(self):
        # Collect sc3-valid attributes, warning about anything dropped.
        att = {}
        att['Name'] = self.id
        for (key, value) in self.att.items():
            if not self.sc3ValidKey(key) or key in att:
                print(" [%s] [%s] ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key, value), file=sys.stderr)
                continue
            att[key] = value
        ## Forcing needed description on the sensor
        if 'Description' not in att:
            att['Description'] = self.id
        return att
class Fir(sc3, prefixable):
    """sc3 wrapper around a FIR filter (Ff) line."""
    def __init__(self, ff):
        sc3.__init__(self, 'fir')
        self.id = ff.id
        self.gain = ff.gain
        self.att = ff.getAttributes()
    def sc3Att(self):
        """Return the sc3-valid attributes, warning about anything dropped."""
        att = {'Name': self.id}
        for (key, value) in self.att.items():
            if self.sc3ValidKey(key) and key not in att:
                att[key] = value
            else:
                print(" [%s] [%s] Ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key, value), file=sys.stderr)
        return att
class Decimation(sc3):
    """sc3 decimation node for one (numerator, denominator) sample rate."""
    def __init__(self, numerator, decimator, dl):
        sc3.__init__(self, 'decimation')
        self._numerator = numerator
        self._denominator = decimator
        self.chains = dl.chains[(numerator, decimator)]
        self.att = {}
    def sc3Resolv(self, inventory):
        """Split the stage chain into analogue ('A') and digital ('D')
        filter publicID sequences, stored space-separated."""
        chains = {'A': [], 'D': []}
        for stage in self.chains:
            publicID = inventory.filterID(stage)
            chains[inventory.filterType(stage)].append(publicID)
        self.att['AnalogueFilterChain'] = " ".join(chains['A'])
        self.att['DigitalFilterChain'] = " ".join(chains['D'])
    def sc3Att(self):
        att = {'SampleRateNumerator': self._numerator,
               'SampleRateDenominator': self._denominator}
        att.update(self.att)
        return att
class Dataloger(sc3, prefixable):
    """sc3 datalogger node built from a Dl line, specialised for one gain.

    NOTE(review): the class name keeps the historical "Dataloger" spelling;
    renaming it would break callers elsewhere in the package.
    """
    def __init__(self, dl, gain = None):
        # dcs is handed to sc3.__init__ BEFORE being filled; the same list
        # object is appended to below, so sc3 sees the final content.
        dcs = []
        sc3.__init__(self, 'datalogger', dcs)
        if gain:
            self.gain = gain
        else:
            self.gain = dl.gain
        self.att = dl.getAttributes()
        self.id = "%s/g=%s" % (dl.id, int(float(self.gain)))
        self.maxClockDrift = dl.mcld
        if dl.chains:
            # One Decimation child per (numerator, denominator) pair.
            for (num, dec) in dl.chains:
                dcs.append(Decimation(num, dec, dl))
            self.dcs = dcs
        else:
            print("[%s] Datalogger %s has no stages." % (self.id, dl), file=sys.stderr)
    def sc3Att(self):
        # Collect sc3-valid attributes, warning about anything dropped.
        att = {}
        att['Name'] = self.id
        att['Gain'] = self.gain
        att['MaxClockDrift'] = self.maxClockDrift
        for (key,value) in self.att.items():
            if not self.sc3ValidKey(key) or key in att:
                print(" [%s] [%s] ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key, value), file=sys.stderr)
                continue
            att[key] = value
        ## Forcing needed description on the sensor
        if 'Description' not in att:
            att['Description'] = self.id
        return att
class Calibration(sc3):
    """sc3 calibration node (sensor or datalogger flavour) for one channel."""
    def __init__(self, cl, channel, start, end):
        # Calibration type 'S' marks a sensor; anything else a datalogger.
        mode = "sensorCalibration" if cl.type == "S" else "dataloggerCalibration"
        sc3.__init__(self, mode)
        if channel < 0 or channel >= cl.channelCount:
            raise Exception("Invalid channel for calibration [%s]" % channel)
        self.start = start
        self.end = end
        self.channel = channel
        self.id = cl.id
        self.att = cl.getAttributes(channel)
    def sc3Att(self):
        """Return the sc3-valid attributes; identity fields come first."""
        att = {'SerialNumber': self.id, 'Start': self.start}
        if self.end:
            att['End'] = self.end
        for (key, value) in self.att.items():
            if self.sc3ValidKey(key) and key not in att:
                att[key] = value
            else:
                print(" [%s] [%s] Ignoring Attribute %s = %s " % (self.sc3Mode, self.id, key, value), file=sys.stderr)
        return att

View File

@ -0,0 +1,489 @@
from __future__ import print_function
from .lineType import Sl, Nw, Sr, Sg
from .nodesi import Instruments
from .basesc3 import sc3
import sys
debug = 0
class DontFit(Exception):
    """Signals that a parsed line does not belong to the node trying to
    absorb it; callers catch it to try the next candidate node."""
    def __init__(self, message):
        super().__init__(message)
class nslc(object):
    """Base for network/station/location/channel nodes: a code plus a
    validity window [start, end); end == None means open-ended."""
    def __init__(self):
        self.start = None
        self.end = None
        self.code = None
    def __overlap__(self, another):
        # Two windows overlap unless one is closed and ends at/before the
        # other's start.
        if self.end and self.end <= another.start:
            return False
        if another.end and self.start >= another.end:
            return False
        return True
    def _span(self):
        return "%s / %s" % (self.start, self.end)
    def sc3Att(self):
        """Assemble sc3 attributes: window and code first, then whatever
        of self.att is valid for this node type (invalid keys warned)."""
        att = {}
        att['Start'] = self.start
        if self.end:
            att['End'] = self.end
        att['Code'] = self.code
        for (key, value) in self.att.items():
            if self.sc3ValidKey(key) and key not in att:
                att[key] = value
            else:
                print("[%s] type %s ignoring attribute %s = %s " % (self.code, self.sc3Mode, key, value), file=sys.stderr)
        return att
def _cmptime(t1, t2):
    """Three-way compare of two times where None means "open end"
    (+infinity): returns -1, 0 or 1."""
    if t1 is None and t2 is None:
        return 0
    if t1 is None:
        return 1
    if t2 is None:
        return -1
    return -1 if t1 < t2 else (1 if t1 > t2 else 0)
class StationGroup(nslc, sc3):
    """Station group (virtual network) built from an Sg line; collects
    station references (Sr lines) and later resolves them to publicIDs."""
    def __str__(self):
        return "%s" % (self.code)
    def __init__(self, sg):
        if not isinstance(sg, Sg):
            # The old "return False" here made Python raise an obscure
            # TypeError ("__init__ should return None"); raise explicitly.
            raise TypeError("StationGroup must be created from a Sg line, not %s" % type(sg).__name__)
        self.stationReferences = []
        sc3.__init__(self, 'stationGroup', self.stationReferences)
        self.code = sg.code
        self.start = sg.start
        self.end = sg.end
        self.att = sg.getStationGroupAttributes()
        # Raw (ncode, scode, start, end) tuples queued for sc3Resolv.
        self.srdata = []
    def __match__(self, sr):
        """True when *sr* is an Sr line whose window intersects ours."""
        if not isinstance(sr, Sr):
            return False
        return (_cmptime(sr.start, self.end) <= 0 and _cmptime(sr.end, self.start) >= 0)
    def conflict(self, another):
        """True when *another* has the same code and an overlapping window."""
        if self.code != another.code:
            return False
        if self.end:
            if self.end <= another.start:
                return False
            if another.end and another.end <= self.start:
                return False
        else:
            if another.end and another.end <= self.start:
                return False
        return True
    def Sr(self, sr):
        """Queue a station reference line for later resolution."""
        self.srdata.append((sr.ncode, sr.scode, sr.start, sr.end))
    def sc3Resolv(self, inventory):
        # Resolve the queued tuples into StationReference children;
        # unresolvable references are reported to stderr, not fatal.
        for (ncode, scode, start, end) in self.srdata:
            try:
                for stationID in inventory.resolveStation(ncode, scode, start, end):
                    st = StationReference(self, stationID)
                    self.stationReferences.append(st)
            except Exception as e:
                sys.stderr.write(str(e) + "\n")
class StationReference(sc3):
    """sc3 station reference: binds a StationGroup to one station publicID."""
    def __init__(self, stationGroup, stationID):
        self.stationGroup = stationGroup
        sc3.__init__(self, 'stationReference')
        self.att = {"StationID": stationID}
    def __str__(self):
        return "%s" % (self.att["StationID"])
    def sc3Att(self):
        return self.att
class Network(nslc, sc3):
    """sc3 network node built from a Nw line; owns Station children."""
    def __str__(self):
        return "%s" % (self.code)
    def __init__(self, nw):
        # NOTE(review): returning a non-None value from __init__ raises a
        # TypeError at runtime, so a non-Nw argument fails with an obscure
        # message rather than actually returning False.
        if not isinstance(nw,Nw):
            return False
        self.stations = []
        sc3.__init__(self, 'network', self.stations)
        nslc.__init__(self)
        self.code = nw.code
        self.start = nw.start
        self.end = nw.end
        self.att = nw.getNetworkAttributes()
    def __match__(self, sl):
        # A station line fits when its window lies inside the network's.
        if not isinstance(sl,Sl):
            return False
        if sl.start < self.start:
            return False
        if self.end:
            if not sl.end or sl.end > self.end:
                return False
        return True
    def conflict(self, another):
        # Same code with overlapping windows (None end = open) conflicts.
        if self.code != another.code:
            return False
        if self.end:
            if self.end <= another.start:
                return False
            if another.end and another.end <= self.start:
                return False
        else:
            if another.end and another.end <= self.start:
                return False
        return True
    def Sl(self, sl):
        # Digest a station (Sl) line: merge it into an existing station
        # when one accepts it, otherwise create a new Station; raises on
        # station window conflicts.
        if not self.__match__(sl):
            raise DontFit(" Object doesn't fit this network object.")
        inserted = False
        for sta in self.stations:
            try:
                where = "%s" % (sta._span())
                sta.Sl(sl)  # raises DontFit when sl belongs elsewhere
                if debug: print("[%s] inserted at %s -> %s" % (self, where, sta._span()), file=sys.stderr)
                inserted = True
                # The merge may have widened sta's window; re-check overlaps.
                for other in self.stations:
                    if other is sta: continue
                    if other.conflict(sta):
                        raise Exception("I Station conflict with already existing station (%s/%s/%s)" % (other, other.start, other.end))
                break
            except DontFit:
                pass
        if not inserted:
            st = Station(self, sl)
            if debug: print("[%s] created new station %s %s" % (self, st, st._span()), file=sys.stderr)
            for sta in self.stations:
                if sta.conflict(st):
                    raise Exception("Station conflict with already existing station (%s/%s/%s)" % (sta, sta.start, sta.end))
            self.stations.append(st)
    def check(self, i):
        # Collect check messages from every station.
        error = []
        for station in self.stations:
            error.extend(station.check(i))
        return error
    def use(self, iid):
        # True when any station (transitively, any channel) uses *iid*.
        c = False
        for station in self.stations:
            c = c or station.use(iid)
            if c: break
        return c
class Station(nslc, sc3):
    """sc3 station node; groups Location children created from Sl lines."""
    def __str__(self):
        return "%s.%s" % (self.network.code, self.code)
    def __init__(self, network, sl):
        # NOTE(review): a non-Sl argument makes this "return False", which
        # Python turns into a TypeError ("__init__ should return None").
        if not isinstance(sl,Sl):
            return False
        self.locations = []
        self.network = network
        sc3.__init__(self, 'station', self.locations)
        # I load myself as a station
        nslc.__init__(self)
        self.code = sl.code
        self.start = sl.start
        self.end = sl.end
        self.att = sl.getStationAttributes()
        # Further parse to generate my locations
        self.Sl(sl)
    def __match__(self, obj):
        # An Sl line matches only with an identical code AND an identical
        # station attribute set (checked in both directions).
        if not isinstance(obj,Sl):
            return False
        # Check code
        if obj.code != self.code:
            return False
        # Attributes
        att = obj.getStationAttributes()
        for at in att:
            # Make sure that all attributes in Sl-line are here
            if at not in self.att:
                return False
            # And they match
            if att[at] != self.att[at]:
                return False
        # Make sure that there is no other attribute here that is not on Sl-line
        for at in self.att:
            if at not in att:
                return False
        return True
    def __adjustTime__(self, sl):
        # Widen this station's window to cover sl's; a None end stays (or
        # becomes) open-ended.
        if sl.start < self.start:
            self.start = sl.start
        if not self.end:
            return
        if sl.end and sl.end < self.end:
            return
        self.end = sl.end
    def conflict(self, another):
        # Same code and overlapping window means conflict.
        if not isinstance(another, Station):
            raise Exception("Cannot compare myself with %s" % type(another))
        if self.code != another.code:
            return False
        if not self.__overlap__(another):
            return False
        return True
    def use(self, iid):
        # True when any of my locations uses instrument id *iid*.
        c = False
        for location in self.locations:
            c = c or location.use(iid)
            if c: break
        return c
    def check(self, i):
        # Collect check messages from every location.
        error = []
        for location in self.locations:
            error.extend(location.check(i))
        return error
    def Sl(self, sl):
        # Merge an Sl line: widen my window, then insert its channels into
        # an existing or new Location; raises DontFit / conflict errors.
        if not self.__match__(sl):
            raise DontFit(" sl doesn't fit this station %s/%s_%s." % (self.code, self.start, self.end))
        # Handle Time Adjustments
        self.__adjustTime__(sl)
        # Handle Locations
        inserted = False
        for loc in self.locations:
            try:
                where = loc._span()
                loc.Sl(sl)
                if debug: print(" [%s] inserted at %s -> %s" % (self, where, loc._span()), file=sys.stderr)
                inserted = True
                # The merge may have widened loc's window; re-check overlaps.
                for other in self.locations:
                    if other is loc: continue
                    if other.conflict(loc):
                        raise Exception("Location conflict with already existing location")
                break
            except DontFit:
                pass
        if not inserted:
            loc = Location(self, sl)
            if debug: print(" [%s] created new location %s %s" % (self, loc, loc._span()), file=sys.stderr)
            for lc in self.locations:
                if lc.conflict(loc):
                    raise Exception("Location conflict with already existing location")
            self.locations.append(loc)
    def sc3Att(self):
        att = nslc.sc3Att(self)
        ## Make sure that we set the Remark
        if 'ArchiveNetworkCode' not in att:
            att['ArchiveNetworkCode'] = self.network.code
        if 'Remark' not in att:
            att['Remark'] = ""
        return att
class Location(nslc, sc3):
    """sc3 sensor-location node; groups Channel children."""
    def __str__(self):
        return "%s.%s.%s" % (self.station.network.code, self.station.code, self.code)
    def __init__(self, station, sl):
        # NOTE(review): a non-Sl argument makes this "return False", which
        # Python turns into a TypeError ("__init__ should return None").
        if not isinstance(sl, Sl):
            return False
        self.channels = []
        sc3.__init__(self, 'location', self.channels)
        nslc.__init__(self)
        self.station = station
        self.code = sl.location
        self.start = sl.start
        self.end = sl.end
        self.att = sl.getLocationAttributes()
        # Parse the same line again to create my channels.
        self.Sl(sl)
    def __adjustTime__(self, sl):
        # Widen this location's window to cover sl's; a None end stays (or
        # becomes) open-ended.
        if sl.start < self.start:
            self.start = sl.start
        if not self.end:
            return
        if sl.end and sl.end < self.end:
            return
        self.end = sl.end
    def __match__(self, obj):
        # An Sl line matches only with an identical location code AND an
        # identical location attribute set (checked in both directions).
        if not isinstance(obj, Sl):
            return False
        if obj.location != self.code:
            return False
        # Attributes
        att = obj.getLocationAttributes()
        for at in att:
            # Make sure that all attributes in Sl-line are here
            if at not in self.att:
                return False
            # And they match
            if att[at] != self.att[at]:
                return False
        # Make sure that there is no other attribute here that is not on Sl-line
        for at in self.att:
            if at not in att:
                return False
        return True
    def conflict(self, another):
        # Same code and overlapping window means conflict.
        if not isinstance(another, Location):
            raise Exception("Cannot compare myself with %s" % type(another))
        if self.code != another.code:
            return False
        if not self.__overlap__(another):
            return False
        return True
    def use(self, iid):
        # True when any of my channels uses id *iid*.
        c = False
        for channel in self.channels:
            c = c or channel.use(iid)
            if c: break
        return c
    def check(self, i):
        # Collect check messages from every channel.
        error = []
        for channel in self.channels:
            error.extend(channel.check(i))
        return error
    def Sl(self, sl):
        # Merge an Sl line: widen my window and create one Channel per
        # channel code listed on the line; raises on channel conflicts.
        if not self.__match__(sl):
            raise DontFit(" This obj doesn't match this Location '%s'" % self.code)
        # Handle Time Adjustments
        self.__adjustTime__(sl)
        # Create Channels
        for code in sl.channels:
            channel = (Channel(self, code, sl))
            if debug: print(" [%s] created new channel %s/%s" % (self, channel, channel._span()), file=sys.stderr)
            for echan in self.channels:
                if echan.conflict(channel):
                    raise Exception("[%s] channel %s conflict with already existing channel" % (self, code))
            #print >>sys.stderr," Channel %s appended at '%s'" % (code, self.code)
            self.channels.append(channel)
class Channel(nslc, sc3):
    """sc3 stream/channel node; carries the instrument references and the
    gains resolved from the Sl line that created it."""
    def __str__(self):
        return "%s.%s.%s.%s" % (self.location.station.network.code, self.location.station.code, self.location.code, self.code)
    def __init__(self, location, code, sl):
        sc3.__init__(self, 'channel')
        self.location = location
        nslc.__init__(self)
        self.code = code
        self.start = sl.start
        self.end = sl.end
        self.att = sl.getChannelAttributes(self.code)
        ## Bring the Instrument gains to the channel level
        self._sensorGain = sl.sensorGain
        self._dataloggerGain = sl.dataloggerGain
    def conflict(self, another):
        """True when *another* channel has the same code and overlaps in time."""
        if not isinstance(another, Channel):
            raise Exception("Cannot compare myself with %s" % type(another))
        if self.code != another.code:
            return False
        if not self.__overlap__(another):
            return False
        return True
    def use(self, iid):
        """True when this channel references *iid* as datalogger or sensor."""
        if 'Datalogger' in self.att and iid == self.att['Datalogger']: return True
        # Bug fix: the key was misspelled 'Sesor', so sensor usage was
        # never detected and every sensor was reported as "not used".
        if 'Sensor' in self.att and iid == self.att['Sensor']: return True
        return False
    def check(self, i):
        """Return a one-entry message list when the Datalogger or Sensor id
        is unknown to the Instruments db *i*, else an empty list."""
        good = []
        if not isinstance(i, Instruments):
            raise Exception("Invalid instrument object")
        if not self.att['Datalogger'] in i.keys:
            good.append("no Datalogger")
        if not self.att['Sensor'] in i.keys:
            good.append("no Sensor")
        if good:
            good = [ " [%s] %s" % (self, "/".join(good)) ]
        return good
    def sc3Resolv(self, inventory):
        """Replace the symbolic Datalogger/Sensor ids in self.att by sc3
        publicIDs, register calibrations and compute the channel gain.

        Failures are reported to stderr and leave the corresponding id
        unresolved (ss/dt None), which then also blocks gain computation.
        """
        if not inventory:
            print("[%s] Warning, inventory not supplied" % self.code, file=sys.stderr)
            return
        try:
            ssm = self.att['Sensor']
            ssg = self._sensorGain
            sch = self.att['SensorChannel']
            ssn = self.att["SensorSerialNumber"] if "SensorSerialNumber" in self.att else None
            # Sensor publicID
            ss = inventory.sensorID(ssm, ssg)
            self.att['Sensor'] = ss
            # Sensor Calibration
            inventory.loadSensorCalibrations(ssm, ssn, sch, ssg, self.start, self.end, ss)
        except Exception as e:
            print("[%s] Sensor Resolution Error %s" % (self, e), file=sys.stderr)
            ss = None
        try:
            dsm = self.att['Datalogger']
            dsg = self._dataloggerGain
            dch = self.att['DataloggerChannel']
            dsn = self.att['DataloggerSerialNumber'] if 'DataloggerSerialNumber' in self.att else None
            dt = inventory.dataloggerID(dsm, dsg)
            self.att['Datalogger'] = dt
            inventory.loadDataloggerCalibrations(dsm, dsn, dch, dsg, self.start, self.end, dt)
        except Exception as e:
            print("[%s] Datalogger Resolution Error %s" % (self, e), file=sys.stderr)
            dt = None
        try:
            up = self.att['SampleRateNumerator']
            down = self.att['SampleRateDenominator']
            # NOTE(review): dsn/dch/sch may be unbound here when one of the
            # resolutions above failed before assigning them; the broad
            # except below downgrades that NameError to a printed message.
            self.att.update(inventory.getChannelGainAttribute(dt, ss, dsn, ssn, dch, sch, up, down, self.start))
        except Exception as e:
            print("[%s] Cannot find gain back for the channel: %s" % (self,e), file=sys.stderr)

View File

@ -0,0 +1,65 @@
import time, datetime
def _cmptime(t1, t2):
    """Three-way compare where None means an open end (+infinity)."""
    if t1 is None:
        return 0 if t2 is None else 1
    if t2 is None:
        return -1
    if t1 < t2:
        return -1
    if t1 > t2:
        return 1
    return 0
def _time2datetime(t):
    """Convert a seiscomp time object *t* into a datetime.datetime.

    Seconds are deliberately formatted as "00" and re-added together with
    the fractional part via a timedelta, so sub-second precision survives
    the strptime round-trip.
    """
    result = datetime.datetime(*time.strptime(t.toString("%Y-%m-%dT%H:%M:00Z"), "%Y-%m-%dT%H:%M:%SZ")[0:6])
    result += datetime.timedelta(microseconds=float(t.toString("%S.%f")) * 1000000)
    # Bug fix: the converted value was computed but never returned, so
    # every caller (e.g. StationResolver.collectStations) received None.
    return result
class StationResolver(object):
    """Maps (network code, station code) pairs to station publicIDs with
    validity windows, collected from one or more inventories."""
    def __init__(self):
        # (ncode, scode) -> list of (start, end, publicID) tuples
        self.stationMap = {}
        # Pairs first seen in an "initial" inventory (e.g. the database);
        # their entries are replaced when the pair shows up again later.
        self.initialStations = set()
    def collectStations(self, inventory, initial = False):
        for ni in range(inventory.networkCount()):
            n = inventory.network(ni)
            for si in range(n.stationCount()):
                s = n.station(si)
                try:
                    if initial:
                        self.initialStations.add((n.code(), s.code()))
                    else:
                        # First non-initial sighting of this pair: drop the
                        # stale initial entry (KeyError -> nothing to drop).
                        self.initialStations.remove((n.code(), s.code()))
                        del self.stationMap[(n.code(), s.code())]
                except KeyError:
                    pass
                try:
                    item = self.stationMap[(n.code(), s.code())]
                except KeyError:
                    item = []
                    self.stationMap[(n.code(), s.code())] = item
                start = _time2datetime(s.start())
                # end is optional on a station; treat "unset" as open-ended.
                try: end = _time2datetime(s.end())
                except: end = None
                item.append((start, end, s.publicID()))
    def resolveStation(self, ncode, scode, start, end):
        # Return the set of publicIDs whose window intersects [start, end];
        # raises when the station is unknown or nothing matches.
        result = set()
        try:
            for (s, e, publicID) in self.stationMap[(ncode, scode)]:
                if _cmptime(start, e) <= 0 and _cmptime(end, s) >= 0:
                    result.add(publicID)
        except KeyError:
            pass
        if not result:
            raise Exception("Station reference %s,%s cannot be resolved" % (ncode, scode))
        return result

364
lib/python/nettab/tab.py Normal file
View File

@ -0,0 +1,364 @@
from __future__ import print_function
from .lineType import Nw, Sg, Sr, Sl, Sa, Na, Dl, Se, Ff, Pz, Ia, Cl
from .nodesi import Instruments
from .nodesnslc import Network, StationGroup, DontFit
import seiscomp.datamodel, seiscomp.io, seiscomp.client
from .stationResolver import StationResolver
import sys
import os
import glob
import re
__VERSION__ = "0.1"
class Tab(object):
    """Driver that parses nettab files into Network/StationGroup objects and
    an Instruments database, cross-checks them and exports a seiscomp
    Inventory."""

    def version(self):
        return __VERSION__

    def __init__(self, instrumentPrefix = None, defaultsFile = None, filterFolder = None, xmlFolder = None, database = None):
        """Prepare an empty digester.

        instrumentPrefix: prefix applied to generated instrument ids.
        defaultsFile: optional file supplying default Na/Sa/Ia lines.
        filterFolder: folder with FIR coefficient files used by Ff lines.
        xmlFolder / database: optional existing inventories used to
        resolve the station references of station groups.
        """
        self.i = Instruments(instrumentPrefix)
        self.n = {}    # tab filename -> Network digested from it
        self.g = {}    # tab filename -> StationGroup digested from it
        self.sas = []  # default station attributes (Sa lines)
        self.nas = []  # default network attributes (Na lines)
        self.ias = []  # default instrument attributes (Ia lines)
        self.stationResolver = StationResolver()
        self._filterFolder = None
        print("Starting tab2inv version %s" % self.version(), file=sys.stderr)
        if not filterFolder:
            print(" Warning, not filter folder supplied.", file=sys.stderr)
        else:
            if not os.path.isdir(filterFolder):
                raise Exception("Filter folder does not exist.")
            self._filterFolder = filterFolder
        if defaultsFile is not None:
            self._defaults(defaultsFile)
        if database is not None:
            self._loadDatabase(database)
        if xmlFolder is not None:
            self._loadXml(xmlFolder)

    def _defaults(self, filename):
        """Load default Na/Sa/Ia attribute lines from *filename*.

        Any non-attribute line type is rejected.  Problems only produce a
        warning; the defaults parsed so far in this call still replace the
        previously stored ones (original behavior, kept).
        """
        sas = []
        ias = []
        nas = []
        try:
            fd = open(filename)
            print(" Parsing defaults file: %s" % (filename), file=sys.stderr)
            for line in fd:
                line = line.strip()
                if not line or line[0] == "#": continue
                (Type, Content) = line.split(":",1)
                if Type == "Na":
                    nas.append(Na(Content))
                elif Type == "Sa":
                    sas.append(Sa(Content))
                elif Type == "Ia":
                    ias.append(Ia(Content))
                elif Type in ("Nw", "Sl", "Se", "Dl", "Cl", "Ff", "If", "Pz"):
                    # Collapsed from eight identical elif branches.
                    raise Exception("Defaults file can only contain attributes")
                else:
                    print(" Ignored line", line, file=sys.stderr)
            fd.close()
        except Exception as e:
            print(" Warning: %s" % e, file=sys.stderr)
            pass
        self.sas = sas
        self.nas = nas
        self.ias = ias

    def _loadDatabase(self, dbUrl):
        """Load networks and stations from a SC3 database URL of the form
        driver://user:pass@host/db into the station resolver."""
        m = re.match(r"(?P<dbDriverName>^.*):\/\/(?P<dbAddress>.+?:.+?@.+?\/.+$)", dbUrl)
        if not m:
            raise Exception("error in parsing SC3 DB url")
        db = m.groupdict()
        try:
            # NOTE(review): seiscomp.system is not imported at the top of
            # this file; this relies on another seiscomp import pulling it
            # in -- confirm.
            registry = seiscomp.system.PluginRegistry.Instance()
            registry.addPluginName("dbmysql")
            registry.loadPlugins()
        except Exception as e:
            raise #"Cannot load database driver: %s"
        dbDriver = seiscomp.io.DatabaseInterface.Create(db["dbDriverName"])
        if dbDriver is None:
            raise Exception("Cannot find database driver " + db["dbDriverName"])
        if not dbDriver.connect(db["dbAddress"]):
            raise Exception("Cannot connect to database at " + db["dbAddress"])
        dbQuery = seiscomp.datamodel.DatabaseQuery(dbDriver)
        if dbQuery is None:
            raise Exception("Cannot get DB query object")
        print(" Loading inventory from database ... ", end=' ', file=sys.stderr)
        inventory = seiscomp.datamodel.Inventory()
        dbQuery.loadNetworks(inventory)
        for ni in range(inventory.networkCount()):
            dbQuery.loadStations(inventory.network(ni))
        print("Done.", file=sys.stderr)
        if inventory:
            self.stationResolver.collectStations(inventory, True)

    def _loadXml(self, folder):
        """Collect stations from every inventory XML file in *folder*."""
        print(" Loading inventory from XML file ... ", end=' ', file=sys.stderr)
        for f in glob.glob(os.path.join(folder, "*.xml")):
            ar = seiscomp.io.XMLArchive()
            ar.open(f)
            inventory = seiscomp.datamodel.Inventory_Cast(ar.readObject())
            ar.close()
            if inventory:
                self.stationResolver.collectStations(inventory)
        print("Done.", file=sys.stderr)

    def digest(self, tabFilename):
        """Parse one tab file, merging its content into self.n / self.g and
        the instrument database self.i.

        A file may define at most one network (Nw) or one station group
        (Sg).  Raises on duplicate definitions, conflicts or bad lines.
        """
        sas = []
        ias = []
        nw = None
        # Bug fix: sg was previously left undefined until a Sg line
        # appeared, so files with a misplaced Na/Sr line died with a
        # NameError instead of the intended error message.
        sg = None
        n = None
        g = None
        print(" Parsing file: %s" % (tabFilename), file=sys.stderr)
        if not tabFilename or not os.path.isfile(tabFilename):
            raise Exception("Supplied filename is invalid.")
        if tabFilename in list(self.n.keys()) or tabFilename in list(self.g.keys()):
            raise Exception("File %s is already digested." % tabFilename)
        # Bug fix: fd must pre-exist so the finally block below does not
        # raise NameError when open() itself fails.
        fd = None
        try:
            fd = open(tabFilename)
            for line in fd:
                obj = None
                line = line.strip()
                if not line or line[0] == "#": continue
                if str(line).find(":") == -1:
                    raise Exception("Invalid line format '%s'" % line)
                (Type, Content) = line.split(":",1)
                if Type == "Nw":
                    if n or g:
                        raise Exception("Network or Station Group already defined, only one Hr line should be defined per file.")
                    try:
                        nw = Nw(Content)
                    except Exception as e:
                        raise Exception("Error while creating nw from '%s': %s" % (Content, e))
                    try:
                        for na in self.nas: nw.Na(na) # Defaults
                    except Exception as e:
                        raise Exception("Error while loading (defaults) %s into %s: %s" % (na, nw, e))
                elif Type == "Sg":
                    if n or g:
                        raise Exception("Network or Station Group already defined, only one Hr line should be defined per file.")
                    try:
                        sg = Sg(Content)
                    except Exception as e:
                        raise Exception("Error while creating sg from '%s': %s" % (Content, e))
                    try:
                        for na in self.nas: sg.Na(na) # Defaults
                    except Exception as e:
                        raise Exception("Error while loading (defaults) %s into %s: %s" % (na, sg, e))
                elif Type == "Na":
                    if not nw and not sg:
                        raise Exception("No network defined, no Na line before a Hr line.")
                    if n or g:
                        raise Exception("No Na lines after a Sl line. Network has already been defined.")
                    try:
                        na = Na(Content)
                    except Exception as e:
                        raise Exception("Error while creating na from '%s': %s" % (Content, e))
                    if nw:
                        try:
                            nw.Na(na)
                        except Exception as e:
                            raise Exception("Error while adding %s to %s: %s" % (na, nw, e))
                    else:
                        try:
                            sg.Na(na)
                        except Exception as e:
                            raise Exception("Error while adding %s to %s: %s" % (na, sg, e))
                elif Type == "Sa":
                    if not nw:
                        raise Exception("Not Sa line before a hr line allowed.")
                    try:
                        sas.append(Sa(Content))
                    except Exception as e:
                        raise Exception("Error while creating Sa from '%s': %s" % (Content,e))
                elif Type == "Sl":
                    if not n:
                        if not nw:
                            raise Exception("No network defined, Hr line should come before station line.")
                        else:
                            # First station line: materialise the Network
                            # and guard against redefinition by other files.
                            n = Network(nw)
                            for (filename, network) in self.n.items():
                                if network.conflict(n):
                                    raise Exception("Network already defined %s (%s)-(%s) by file %s." % (network.code, network.start, network.end, filename))
                    try:
                        sl = Sl(Content)
                    except Exception as e:
                        raise Exception("Error while creating sl from '%s': %s" % (Content, e))
                    # Fill in attributes
                    try:
                        for sa in self.sas: sl.Sa(sa) # Defaults
                    except Exception as e:
                        raise Exception("Error while loading (default) %s into %s: %s" % (sa, sl, e))
                    try:
                        for sa in sas: sl.Sa(sa) # Collected
                    except Exception as e:
                        raise Exception("Error while loading %s into %s: %s" % (str(sa), str(sl), e))
                    # Digest by Station
                    try:
                        n.Sl(sl)
                    except DontFit:
                        raise Exception("%s does not fit in %s" % (sl, n))
                    except Exception as e:
                        raise Exception("Error while loading %s into %s: %s" % (sl, n, e))
                elif Type == "Sr":
                    if not g:
                        if not sg:
                            raise Exception("No station group defined, Sg line should come before station reference line.")
                        else:
                            # First reference line: materialise the group
                            # and check it against groups and networks.
                            g = StationGroup(sg)
                            for (filename, stationGroup) in self.g.items():
                                if stationGroup.conflict(g):
                                    raise Exception("Station group already defined %s (%s)-(%s) by file %s." % (stationGroup.code, stationGroup.start, stationGroup.end, filename))
                            for (filename, network) in self.n.items():
                                if network.conflict(g):
                                    raise Exception("Station group conflict network already defined %s (%s)-(%s) by file %s." % (network.code, network.start, network.end, filename))
                    try:
                        sr = Sr(Content)
                    except Exception as e:
                        raise Exception("Error while creating sr from '%s': %s" % (Content, e))
                    # Digest by Station Reference
                    try:
                        g.Sr(sr)
                    except DontFit:
                        raise Exception("%s does not fit in %s" % (sr, n))
                    except Exception as e:
                        raise Exception("Error while loading %s into %s: %s" % (sr, n, e))
                elif Type == "Ia":
                    ias.append(Ia(Content))
                elif Type == "Se":
                    obj = Se(Content)
                elif Type == "Dl":
                    obj = Dl(Content)
                elif Type == "Cl":
                    obj = Cl(Content)
                elif Type == "Ff":
                    obj = Ff(self._filterFolder, Content)
                elif Type == "If":
                    obj = Pz(Content,'D')
                elif Type == "Pz":
                    obj = Pz(Content,'A')
                else:
                    print(" Ignored line", line, file=sys.stderr)
                ## Process Instrument
                if obj:
                    try:
                        for ia in self.ias: obj.Ia(ia) # Defaults
                    except Exception as e:
                        raise Exception("Error while loading (defaults) %s into %s: %s" % (ia, obj, e))
                    try:
                        for ia in ias: obj.Ia(ia) # Collected
                    except Exception as e:
                        raise Exception("Error while loading %s into %s: %s" % (ia, obj, e))
                    try:
                        self.i.add(obj)
                    except Exception as e:
                        raise Exception("Error while loading %s into Instruments db: %s" % (obj, e))
                    obj = None
            # Process Network
            if n:
                self.n[tabFilename] = n
            # Process Station Group
            if g:
                self.g[tabFilename] = g
        finally:
            # fd is None when open() failed; previously this was a NameError.
            if fd:
                fd.close()

    def check(self):
        """Run instrument-only checks plus network/instrument cross checks,
        printing the results to stderr."""
        # Instrument alone check
        if self.i.keys:
            # Bug fix: user-facing message read "Cheking".
            print("\nChecking Instruments Loaded:\n", file=sys.stderr)
            error = self.i.check(self.n)
            if error:
                for e in error: print(e, file=sys.stderr)
        else:
            print("\nNo instruments loaded", file=sys.stderr)
        # Cross Check
        error = []
        if self.n:
            print("\nChecking Networks Loaded:\n", file=sys.stderr)
            for network in self.n.values():
                error.extend(network.check(self.i))
            if error:
                for e in error: print(e, file=sys.stderr)
        else:
            print("\nNo network/stations loaded.", file=sys.stderr)

    def sc3Obj(self, sc3i = None):
        """Export networks, instruments and station groups into *sc3i*
        (a new seiscomp Inventory when not given) and return it."""
        if not sc3i:
            sc3i = seiscomp.datamodel.Inventory()
        for network in list(self.n.values()):
            sc3n = network.sc3Obj(self.i)
            sc3i.add(sc3n)
        for sc3o in self.i.sc3Objs():
            sc3i.add(sc3o)
        # Station groups may reference stations from any loaded inventory,
        # including the one just built.
        self.stationResolver.collectStations(sc3i)
        for stationGroup in list(self.g.values()):
            sc3g = stationGroup.sc3Obj(self.stationResolver)
            sc3i.add(sc3g)
        return sc3i

View File

@ -0,0 +1,31 @@
0 1.219929e-16 0.000000e+00
1 3.161921e-10 0.000000e+00
2 -4.314652e-08 0.000000e+00
3 -5.635558e-07 0.000000e+00
4 -1.267008e-04 0.000000e+00
5 3.658144e-03 0.000000e+00
6 1.675314e-04 0.000000e+00
7 -5.404505e-03 0.000000e+00
8 1.278609e-02 0.000000e+00
9 -1.803566e-02 0.000000e+00
10 1.473116e-02 0.000000e+00
11 3.226941e-03 0.000000e+00
12 -3.859694e-02 0.000000e+00
13 8.883527e-02 0.000000e+00
14 -1.482427e-01 0.000000e+00
15 2.177661e-01 0.000000e+00
16 8.099144e-01 0.000000e+00
17 1.245959e-01 0.000000e+00
18 -1.230407e-01 0.000000e+00
19 8.899753e-02 0.000000e+00
20 -4.850157e-02 0.000000e+00
21 1.425912e-02 0.000000e+00
22 6.896391e-03 0.000000e+00
23 -1.444342e-02 0.000000e+00
24 1.242861e-02 0.000000e+00
25 -6.568726e-03 0.000000e+00
26 1.522040e-03 0.000000e+00
27 3.142093e-03 0.000000e+00
28 3.656274e-05 0.000000e+00
29 -2.152995e-06 0.000000e+00
30 -2.597827e-07 0.000000e+00

View File

@ -0,0 +1,65 @@
0 1.315493e-11 0.000000e+00
1 1.501065e-04 0.000000e+00
2 1.339681e-02 0.000000e+00
3 1.644292e-01 0.000000e+00
4 5.688094e-01 0.000000e+00
5 5.173835e-01 0.000000e+00
6 -2.608360e-01 0.000000e+00
7 -1.220329e-01 0.000000e+00
8 2.571813e-01 0.000000e+00
9 -2.029026e-01 0.000000e+00
10 7.075881e-02 0.000000e+00
11 3.879666e-02 0.000000e+00
12 -1.143135e-01 0.000000e+00
13 1.354797e-01 0.000000e+00
14 -1.114475e-01 0.000000e+00
15 6.705481e-02 0.000000e+00
16 -1.927124e-02 0.000000e+00
17 -2.093129e-02 0.000000e+00
18 4.768056e-02 0.000000e+00
19 -5.933829e-02 0.000000e+00
20 5.757931e-02 0.000000e+00
21 -4.623331e-02 0.000000e+00
22 2.977715e-02 0.000000e+00
23 -1.248294e-02 0.000000e+00
24 -2.366075e-03 0.000000e+00
25 1.278821e-02 0.000000e+00
26 -1.846982e-02 0.000000e+00
27 1.879725e-02 0.000000e+00
28 -1.713865e-02 0.000000e+00
29 1.278199e-02 0.000000e+00
30 -7.675787e-03 0.000000e+00
31 3.255159e-03 0.000000e+00
32 -8.947563e-05 0.000000e+00
33 -1.778758e-03 0.000000e+00
34 2.596043e-03 0.000000e+00
35 -2.666169e-03 0.000000e+00
36 2.307403e-03 0.000000e+00
37 -1.770516e-03 0.000000e+00
38 1.218643e-03 0.000000e+00
39 -7.460492e-04 0.000000e+00
40 3.921752e-04 0.000000e+00
41 -1.583665e-04 0.000000e+00
42 2.437801e-05 0.000000e+00
43 3.807573e-05 0.000000e+00
44 -5.618048e-05 0.000000e+00
45 5.152771e-05 0.000000e+00
46 -3.856469e-05 0.000000e+00
47 2.530286e-05 0.000000e+00
48 -1.512465e-05 0.000000e+00
49 8.739795e-06 0.000000e+00
50 -4.648117e-06 0.000000e+00
51 1.376276e-06 0.000000e+00
52 7.042064e-07 0.000000e+00
53 2.241873e-07 0.000000e+00
54 -1.251026e-06 0.000000e+00
55 1.066771e-07 0.000000e+00
56 2.642876e-07 0.000000e+00
57 3.226638e-07 0.000000e+00
58 -8.074162e-08 0.000000e+00
59 -1.099048e-07 0.000000e+00
60 -3.325203e-08 0.000000e+00
61 1.388506e-08 0.000000e+00
62 1.056275e-08 0.000000e+00
63 2.577911e-09 0.000000e+00
64 -7.018623e-10 0.000000e+00

View File

@ -0,0 +1,67 @@
0 -3.653417e-17 0.000000e+00
1 3.674881e-08 0.000000e+00
2 -4.270596e-07 0.000000e+00
3 1.145020e-06 0.000000e+00
4 -1.875941e-07 0.000000e+00
5 -3.372737e-07 0.000000e+00
6 2.787469e-06 0.000000e+00
7 -3.744026e-06 0.000000e+00
8 5.411719e-06 0.000000e+00
9 7.473363e-06 0.000000e+00
10 -5.177595e-04 0.000000e+00
11 2.106768e-04 0.000000e+00
12 4.632577e-05 0.000000e+00
13 -6.082222e-04 0.000000e+00
14 1.441747e-03 0.000000e+00
15 -2.406265e-03 0.000000e+00
16 3.225338e-03 0.000000e+00
17 -3.506390e-03 0.000000e+00
18 2.814411e-03 0.000000e+00
19 -7.719714e-04 0.000000e+00
20 -2.805119e-03 0.000000e+00
21 7.778055e-03 0.000000e+00
22 -1.358146e-02 0.000000e+00
23 1.917646e-02 0.000000e+00
24 -2.297035e-02 0.000000e+00
25 2.403979e-02 0.000000e+00
26 -2.209865e-02 0.000000e+00
27 8.607339e-03 0.000000e+00
28 1.175252e-02 0.000000e+00
29 -4.477868e-02 0.000000e+00
30 9.649231e-02 0.000000e+00
31 -1.917548e-01 0.000000e+00
32 5.276523e-01 0.000000e+00
33 7.241670e-01 0.000000e+00
34 -1.569053e-01 0.000000e+00
35 4.425742e-02 0.000000e+00
36 3.141684e-03 0.000000e+00
37 -2.667144e-02 0.000000e+00
38 3.615316e-02 0.000000e+00
39 -3.856867e-02 0.000000e+00
40 3.108417e-02 0.000000e+00
41 -2.352589e-02 0.000000e+00
42 1.532109e-02 0.000000e+00
43 -7.403983e-03 0.000000e+00
44 1.096454e-03 0.000000e+00
45 3.097965e-03 0.000000e+00
46 -5.193199e-03 0.000000e+00
47 5.561311e-03 0.000000e+00
48 -4.761101e-03 0.000000e+00
49 3.382132e-03 0.000000e+00
50 -1.920520e-03 0.000000e+00
51 7.152175e-04 0.000000e+00
52 7.677194e-05 0.000000e+00
53 -4.518973e-04 0.000000e+00
54 5.026997e-04 0.000000e+00
55 -5.650370e-04 0.000000e+00
56 -5.568005e-05 0.000000e+00
57 1.577356e-05 0.000000e+00
58 -1.419847e-06 0.000000e+00
59 8.149094e-07 0.000000e+00
60 6.807946e-07 0.000000e+00
61 -1.252728e-06 0.000000e+00
62 1.524350e-06 0.000000e+00
63 -2.833359e-07 0.000000e+00
64 -1.063838e-08 0.000000e+00
65 1.257120e-09 0.000000e+00
66 -5.429542e-11 0.000000e+00

View File

@ -0,0 +1,39 @@
0 4.189518e-13 0.000000e+00
1 3.303176e-04 0.000000e+00
2 1.029213e-03 0.000000e+00
3 -3.141228e-03 0.000000e+00
4 2.057093e-04 0.000000e+00
5 1.525213e-03 0.000000e+00
6 -6.231927e-03 0.000000e+00
7 1.048013e-02 0.000000e+00
8 -1.312025e-02 0.000000e+00
9 1.078214e-02 0.000000e+00
10 -1.444550e-03 0.000000e+00
11 -1.587295e-02 0.000000e+00
12 3.950740e-02 0.000000e+00
13 -6.510363e-02 0.000000e+00
14 8.537156e-02 0.000000e+00
15 -8.919134e-02 0.000000e+00
16 5.006189e-02 0.000000e+00
17 8.372328e-01 0.000000e+00
18 2.667231e-01 0.000000e+00
19 -1.666931e-01 0.000000e+00
20 9.528399e-02 0.000000e+00
21 -5.092177e-02 0.000000e+00
22 1.614584e-02 0.000000e+00
23 7.063624e-03 0.000000e+00
24 -1.838771e-02 0.000000e+00
25 1.994141e-02 0.000000e+00
26 -1.548951e-02 0.000000e+00
27 8.527354e-03 0.000000e+00
28 -2.557887e-03 0.000000e+00
29 -1.811026e-03 0.000000e+00
30 2.426493e-03 0.000000e+00
31 -3.757695e-03 0.000000e+00
32 4.672927e-04 0.000000e+00
33 6.330721e-04 0.000000e+00
34 -1.568741e-06 0.000000e+00
35 -1.254798e-05 0.000000e+00
36 3.210405e-07 0.000000e+00
37 -2.633241e-08 0.000000e+00
38 -5.099975e-08 0.000000e+00

View File

@ -0,0 +1,81 @@
0 6.915055e-16 0.000000e+00
1 9.981469e-07 0.000000e+00
2 8.986285e-05 0.000000e+00
3 3.536859e-04 0.000000e+00
4 -3.196747e-04 0.000000e+00
5 2.398310e-04 0.000000e+00
6 4.343304e-05 0.000000e+00
7 -6.140379e-04 0.000000e+00
8 1.450240e-03 0.000000e+00
9 -2.414179e-03 0.000000e+00
10 3.243791e-03 0.000000e+00
11 -3.565280e-03 0.000000e+00
12 2.956281e-03 0.000000e+00
13 -1.048729e-03 0.000000e+00
14 -2.353488e-03 0.000000e+00
15 7.146584e-03 0.000000e+00
16 -1.283558e-02 0.000000e+00
17 1.849560e-02 0.000000e+00
18 -2.280356e-02 0.000000e+00
19 2.414348e-02 0.000000e+00
20 -2.075420e-02 0.000000e+00
21 1.085375e-02 0.000000e+00
22 7.376841e-03 0.000000e+00
23 -3.628054e-02 0.000000e+00
24 8.073029e-02 0.000000e+00
25 -1.563791e-01 0.000000e+00
26 5.966318e-01 0.000000e+00
27 6.616155e-01 0.000000e+00
28 -1.985033e-01 0.000000e+00
29 5.962802e-02 0.000000e+00
30 -1.201563e-02 0.000000e+00
31 -2.031269e-02 0.000000e+00
32 3.489734e-02 0.000000e+00
33 -3.783039e-02 0.000000e+00
34 3.414802e-02 0.000000e+00
35 -2.681871e-02 0.000000e+00
36 1.805448e-02 0.000000e+00
37 -9.684112e-03 0.000000e+00
38 1.924548e-03 0.000000e+00
39 2.270220e-03 0.000000e+00
40 -4.929948e-03 0.000000e+00
41 5.783542e-03 0.000000e+00
42 -5.278113e-03 0.000000e+00
43 4.012361e-03 0.000000e+00
44 -2.512171e-03 0.000000e+00
45 1.166119e-03 0.000000e+00
46 -1.915292e-04 0.000000e+00
47 -3.549948e-04 0.000000e+00
48 5.355819e-04 0.000000e+00
49 -4.810171e-04 0.000000e+00
50 4.186318e-04 0.000000e+00
51 7.809605e-05 0.000000e+00
52 -5.470072e-06 0.000000e+00
53 -2.123757e-06 0.000000e+00
54 -6.620526e-07 0.000000e+00
55 7.238966e-07 0.000000e+00
56 1.013226e-06 0.000000e+00
57 -1.929203e-06 0.000000e+00
58 7.801228e-07 0.000000e+00
59 -7.887565e-07 0.000000e+00
60 5.818626e-07 0.000000e+00
61 3.221050e-08 0.000000e+00
62 -1.076378e-07 0.000000e+00
63 1.999555e-08 0.000000e+00
64 -7.052141e-08 0.000000e+00
65 -1.357645e-08 0.000000e+00
66 -3.311185e-08 0.000000e+00
67 1.552117e-08 0.000000e+00
68 -5.395556e-09 0.000000e+00
69 7.791274e-09 0.000000e+00
70 2.075919e-10 0.000000e+00
71 -9.326780e-10 0.000000e+00
72 1.850689e-09 0.000000e+00
73 -1.973863e-09 0.000000e+00
74 1.334281e-09 0.000000e+00
75 -6.315467e-10 0.000000e+00
76 6.994718e-11 0.000000e+00
77 1.148694e-10 0.000000e+00
78 -5.595614e-11 0.000000e+00
79 5.760568e-12 0.000000e+00
80 -5.489862e-12 0.000000e+00

View File

@ -0,0 +1,400 @@
0 -1.280410E-09 0.000000E+00
1 9.089140E-09 0.000000E+00
2 2.857200E-08 0.000000E+00
3 7.068940E-08 0.000000E+00
4 1.503850E-07 0.000000E+00
5 2.898420E-07 0.000000E+00
6 5.199920E-07 0.000000E+00
7 8.824160E-07 0.000000E+00
8 1.431250E-06 0.000000E+00
9 2.234920E-06 0.000000E+00
10 3.377490E-06 0.000000E+00
11 4.959500E-06 0.000000E+00
12 7.097790E-06 0.000000E+00
13 9.924440E-06 0.000000E+00
14 1.358420E-05 0.000000E+00
15 1.823040E-05 0.000000E+00
16 2.401920E-05 0.000000E+00
17 3.110180E-05 0.000000E+00
18 3.961540E-05 0.000000E+00
19 4.967160E-05 0.000000E+00
20 6.134480E-05 0.000000E+00
21 7.465790E-05 0.000000E+00
22 8.956970E-05 0.000000E+00
23 1.059620E-04 0.000000E+00
24 1.236260E-04 0.000000E+00
25 1.422580E-04 0.000000E+00
26 1.614470E-04 0.000000E+00
27 1.806800E-04 0.000000E+00
28 1.993440E-04 0.000000E+00
29 2.167350E-04 0.000000E+00
30 2.320800E-04 0.000000E+00
31 2.445590E-04 0.000000E+00
32 2.533370E-04 0.000000E+00
33 2.576020E-04 0.000000E+00
34 2.566110E-04 0.000000E+00
35 2.497330E-04 0.000000E+00
36 2.364990E-04 0.000000E+00
37 2.166500E-04 0.000000E+00
38 1.901760E-04 0.000000E+00
39 1.573550E-04 0.000000E+00
40 1.187790E-04 0.000000E+00
41 7.536150E-05 0.000000E+00
42 2.833800E-05 0.000000E+00
43 -2.075750E-05 0.000000E+00
44 -7.013260E-05 0.000000E+00
45 -1.177970E-04 0.000000E+00
46 -1.616380E-04 0.000000E+00
47 -1.995190E-04 0.000000E+00
48 -2.293810E-04 0.000000E+00
49 -2.493630E-04 0.000000E+00
50 -2.579120E-04 0.000000E+00
51 -2.539050E-04 0.000000E+00
52 -2.367430E-04 0.000000E+00
53 -2.064400E-04 0.000000E+00
54 -1.636770E-04 0.000000E+00
55 -1.098340E-04 0.000000E+00
56 -4.697750E-05 0.000000E+00
57 2.218660E-05 0.000000E+00
58 9.440430E-05 0.000000E+00
59 1.660030E-04 0.000000E+00
60 2.330560E-04 0.000000E+00
61 2.915810E-04 0.000000E+00
62 3.377580E-04 0.000000E+00
63 3.681570E-04 0.000000E+00
64 3.799620E-04 0.000000E+00
65 3.711900E-04 0.000000E+00
66 3.408650E-04 0.000000E+00
67 2.891620E-04 0.000000E+00
68 2.174900E-04 0.000000E+00
69 1.285060E-04 0.000000E+00
70 2.606830E-05 0.000000E+00
71 -8.490010E-05 0.000000E+00
72 -1.986100E-04 0.000000E+00
73 -3.086790E-04 0.000000E+00
74 -4.084630E-04 0.000000E+00
75 -4.914240E-04 0.000000E+00
76 -5.515290E-04 0.000000E+00
77 -5.836450E-04 0.000000E+00
78 -5.839130E-04 0.000000E+00
79 -5.500750E-04 0.000000E+00
80 -4.817300E-04 0.000000E+00
81 -3.804970E-04 0.000000E+00
82 -2.500650E-04 0.000000E+00
83 -9.613190E-05 0.000000E+00
84 7.379770E-05 0.000000E+00
85 2.507300E-04 0.000000E+00
86 4.246150E-04 0.000000E+00
87 5.848830E-04 0.000000E+00
88 7.210410E-04 0.000000E+00
89 8.233180E-04 0.000000E+00
90 8.833110E-04 0.000000E+00
91 8.945860E-04 0.000000E+00
92 8.532140E-04 0.000000E+00
93 7.581840E-04 0.000000E+00
94 6.116610E-04 0.000000E+00
95 4.190820E-04 0.000000E+00
96 1.890410E-04 0.000000E+00
97 -6.701870E-05 0.000000E+00
98 -3.353110E-04 0.000000E+00
99 -6.003940E-04 0.000000E+00
100 -8.460070E-04 0.000000E+00
101 -1.056010E-03 0.000000E+00
102 -1.215390E-03 0.000000E+00
103 -1.311250E-03 0.000000E+00
104 -1.333740E-03 0.000000E+00
105 -1.276860E-03 0.000000E+00
106 -1.139110E-03 0.000000E+00
107 -9.238090E-04 0.000000E+00
108 -6.392740E-04 0.000000E+00
109 -2.985730E-04 0.000000E+00
110 8.095210E-05 0.000000E+00
111 4.784920E-04 0.000000E+00
112 8.708350E-04 0.000000E+00
113 1.233650E-03 0.000000E+00
114 1.542910E-03 0.000000E+00
115 1.776410E-03 0.000000E+00
116 1.915250E-03 0.000000E+00
117 1.945200E-03 0.000000E+00
118 1.857870E-03 0.000000E+00
119 1.651590E-03 0.000000E+00
120 1.331930E-03 0.000000E+00
121 9.117790E-04 0.000000E+00
122 4.110140E-04 0.000000E+00
123 -1.443240E-04 0.000000E+00
124 -7.232630E-04 0.000000E+00
125 -1.291520E-03 0.000000E+00
126 -1.813440E-03 0.000000E+00
127 -2.254090E-03 0.000000E+00
128 -2.581490E-03 0.000000E+00
129 -2.768760E-03 0.000000E+00
130 -2.796120E-03 0.000000E+00
131 -2.652470E-03 0.000000E+00
132 -2.336640E-03 0.000000E+00
133 -1.858050E-03 0.000000E+00
134 -1.236750E-03 0.000000E+00
135 -5.027860E-04 0.000000E+00
136 3.050470E-04 0.000000E+00
137 1.141090E-03 0.000000E+00
138 1.955230E-03 0.000000E+00
139 2.695760E-03 0.000000E+00
140 3.312460E-03 0.000000E+00
141 3.759760E-03 0.000000E+00
142 3.999910E-03 0.000000E+00
143 4.005660E-03 0.000000E+00
144 3.762670E-03 0.000000E+00
145 3.271090E-03 0.000000E+00
146 2.546440E-03 0.000000E+00
147 1.619580E-03 0.000000E+00
148 5.357070E-04 0.000000E+00
149 -6.475150E-04 0.000000E+00
150 -1.862780E-03 0.000000E+00
151 -3.036670E-03 0.000000E+00
152 -4.093770E-03 0.000000E+00
153 -4.961150E-03 0.000000E+00
154 -5.573010E-03 0.000000E+00
155 -5.875080E-03 0.000000E+00
156 -5.828670E-03 0.000000E+00
157 -5.414010E-03 0.000000E+00
158 -4.632620E-03 0.000000E+00
159 -3.508570E-03 0.000000E+00
160 -2.088510E-03 0.000000E+00
161 -4.402630E-04 0.000000E+00
162 1.349800E-03 0.000000E+00
163 3.180770E-03 0.000000E+00
164 4.942220E-03 0.000000E+00
165 6.520130E-03 0.000000E+00
166 7.803440E-03 0.000000E+00
167 8.690760E-03 0.000000E+00
168 9.097010E-03 0.000000E+00
169 8.959570E-03 0.000000E+00
170 8.243470E-03 0.000000E+00
171 6.945480E-03 0.000000E+00
172 5.096570E-03 0.000000E+00
173 2.762750E-03 0.000000E+00
174 4.398920E-05 0.000000E+00
175 -2.928690E-03 0.000000E+00
176 -5.998030E-03 0.000000E+00
177 -8.986910E-03 0.000000E+00
178 -1.170620E-02 0.000000E+00
179 -1.396360E-02 0.000000E+00
180 -1.557300E-02 0.000000E+00
181 -1.636440E-02 0.000000E+00
182 -1.619300E-02 0.000000E+00
183 -1.494760E-02 0.000000E+00
184 -1.255800E-02 0.000000E+00
185 -9.000540E-03 0.000000E+00
186 -4.301130E-03 0.000000E+00
187 1.463060E-03 0.000000E+00
188 8.165080E-03 0.000000E+00
189 1.563180E-02 0.000000E+00
190 2.364960E-02 0.000000E+00
191 3.197290E-02 0.000000E+00
192 4.033310E-02 0.000000E+00
193 4.845020E-02 0.000000E+00
194 5.604420E-02 0.000000E+00
195 6.284710E-02 0.000000E+00
196 6.861480E-02 0.000000E+00
197 7.313740E-02 0.000000E+00
198 7.624880E-02 0.000000E+00
199 7.783390E-02 0.000000E+00
200 7.783390E-02 0.000000E+00
201 7.624880E-02 0.000000E+00
202 7.313740E-02 0.000000E+00
203 6.861480E-02 0.000000E+00
204 6.284710E-02 0.000000E+00
205 5.604420E-02 0.000000E+00
206 4.845020E-02 0.000000E+00
207 4.033310E-02 0.000000E+00
208 3.197290E-02 0.000000E+00
209 2.364960E-02 0.000000E+00
210 1.563180E-02 0.000000E+00
211 8.165080E-03 0.000000E+00
212 1.463060E-03 0.000000E+00
213 -4.301130E-03 0.000000E+00
214 -9.000540E-03 0.000000E+00
215 -1.255800E-02 0.000000E+00
216 -1.494760E-02 0.000000E+00
217 -1.619300E-02 0.000000E+00
218 -1.636440E-02 0.000000E+00
219 -1.557300E-02 0.000000E+00
220 -1.396360E-02 0.000000E+00
221 -1.170620E-02 0.000000E+00
222 -8.986910E-03 0.000000E+00
223 -5.998030E-03 0.000000E+00
224 -2.928690E-03 0.000000E+00
225 4.398920E-05 0.000000E+00
226 2.762750E-03 0.000000E+00
227 5.096570E-03 0.000000E+00
228 6.945480E-03 0.000000E+00
229 8.243470E-03 0.000000E+00
230 8.959570E-03 0.000000E+00
231 9.097010E-03 0.000000E+00
232 8.690760E-03 0.000000E+00
233 7.803440E-03 0.000000E+00
234 6.520130E-03 0.000000E+00
235 4.942220E-03 0.000000E+00
236 3.180770E-03 0.000000E+00
237 1.349800E-03 0.000000E+00
238 -4.402630E-04 0.000000E+00
239 -2.088510E-03 0.000000E+00
240 -3.508570E-03 0.000000E+00
241 -4.632620E-03 0.000000E+00
242 -5.414010E-03 0.000000E+00
243 -5.828670E-03 0.000000E+00
244 -5.875080E-03 0.000000E+00
245 -5.573010E-03 0.000000E+00
246 -4.961150E-03 0.000000E+00
247 -4.093770E-03 0.000000E+00
248 -3.036670E-03 0.000000E+00
249 -1.862780E-03 0.000000E+00
250 -6.475150E-04 0.000000E+00
251 5.357070E-04 0.000000E+00
252 1.619580E-03 0.000000E+00
253 2.546440E-03 0.000000E+00
254 3.271090E-03 0.000000E+00
255 3.762670E-03 0.000000E+00
256 4.005660E-03 0.000000E+00
257 3.999910E-03 0.000000E+00
258 3.759760E-03 0.000000E+00
259 3.312460E-03 0.000000E+00
260 2.695760E-03 0.000000E+00
261 1.955230E-03 0.000000E+00
262 1.141090E-03 0.000000E+00
263 3.050470E-04 0.000000E+00
264 -5.027860E-04 0.000000E+00
265 -1.236750E-03 0.000000E+00
266 -1.858050E-03 0.000000E+00
267 -2.336640E-03 0.000000E+00
268 -2.652470E-03 0.000000E+00
269 -2.796120E-03 0.000000E+00
270 -2.768760E-03 0.000000E+00
271 -2.581490E-03 0.000000E+00
272 -2.254090E-03 0.000000E+00
273 -1.813440E-03 0.000000E+00
274 -1.291520E-03 0.000000E+00
275 -7.232630E-04 0.000000E+00
276 -1.443240E-04 0.000000E+00
277 4.110140E-04 0.000000E+00
278 9.117790E-04 0.000000E+00
279 1.331930E-03 0.000000E+00
280 1.651590E-03 0.000000E+00
281 1.857870E-03 0.000000E+00
282 1.945200E-03 0.000000E+00
283 1.915250E-03 0.000000E+00
284 1.776410E-03 0.000000E+00
285 1.542910E-03 0.000000E+00
286 1.233650E-03 0.000000E+00
287 8.708350E-04 0.000000E+00
288 4.784920E-04 0.000000E+00
289 8.095210E-05 0.000000E+00
290 -2.985730E-04 0.000000E+00
291 -6.392740E-04 0.000000E+00
292 -9.238090E-04 0.000000E+00
293 -1.139110E-03 0.000000E+00
294 -1.276860E-03 0.000000E+00
295 -1.333740E-03 0.000000E+00
296 -1.311250E-03 0.000000E+00
297 -1.215390E-03 0.000000E+00
298 -1.056010E-03 0.000000E+00
299 -8.460070E-04 0.000000E+00
300 -6.003940E-04 0.000000E+00
301 -3.353110E-04 0.000000E+00
302 -6.701870E-05 0.000000E+00
303 1.890410E-04 0.000000E+00
304 4.190820E-04 0.000000E+00
305 6.116610E-04 0.000000E+00
306 7.581840E-04 0.000000E+00
307 8.532140E-04 0.000000E+00
308 8.945860E-04 0.000000E+00
309 8.833110E-04 0.000000E+00
310 8.233180E-04 0.000000E+00
311 7.210410E-04 0.000000E+00
312 5.848830E-04 0.000000E+00
313 4.246150E-04 0.000000E+00
314 2.507300E-04 0.000000E+00
315 7.379770E-05 0.000000E+00
316 -9.613190E-05 0.000000E+00
317 -2.500650E-04 0.000000E+00
318 -3.804970E-04 0.000000E+00
319 -4.817300E-04 0.000000E+00
320 -5.500750E-04 0.000000E+00
321 -5.839130E-04 0.000000E+00
322 -5.836450E-04 0.000000E+00
323 -5.515290E-04 0.000000E+00
324 -4.914240E-04 0.000000E+00
325 -4.084630E-04 0.000000E+00
326 -3.086790E-04 0.000000E+00
327 -1.986100E-04 0.000000E+00
328 -8.490010E-05 0.000000E+00
329 2.606830E-05 0.000000E+00
330 1.285060E-04 0.000000E+00
331 2.174900E-04 0.000000E+00
332 2.891620E-04 0.000000E+00
333 3.408650E-04 0.000000E+00
334 3.711900E-04 0.000000E+00
335 3.799620E-04 0.000000E+00
336 3.681570E-04 0.000000E+00
337 3.377580E-04 0.000000E+00
338 2.915810E-04 0.000000E+00
339 2.330560E-04 0.000000E+00
340 1.660030E-04 0.000000E+00
341 9.440430E-05 0.000000E+00
342 2.218660E-05 0.000000E+00
343 -4.697750E-05 0.000000E+00
344 -1.098340E-04 0.000000E+00
345 -1.636770E-04 0.000000E+00
346 -2.064400E-04 0.000000E+00
347 -2.367430E-04 0.000000E+00
348 -2.539050E-04 0.000000E+00
349 -2.579120E-04 0.000000E+00
350 -2.493630E-04 0.000000E+00
351 -2.293810E-04 0.000000E+00
352 -1.995190E-04 0.000000E+00
353 -1.616380E-04 0.000000E+00
354 -1.177970E-04 0.000000E+00
355 -7.013260E-05 0.000000E+00
356 -2.075750E-05 0.000000E+00
357 2.833800E-05 0.000000E+00
358 7.536150E-05 0.000000E+00
359 1.187790E-04 0.000000E+00
360 1.573550E-04 0.000000E+00
361 1.901760E-04 0.000000E+00
362 2.166500E-04 0.000000E+00
363 2.364990E-04 0.000000E+00
364 2.497330E-04 0.000000E+00
365 2.566110E-04 0.000000E+00
366 2.576020E-04 0.000000E+00
367 2.533370E-04 0.000000E+00
368 2.445590E-04 0.000000E+00
369 2.320800E-04 0.000000E+00
370 2.167350E-04 0.000000E+00
371 1.993440E-04 0.000000E+00
372 1.806800E-04 0.000000E+00
373 1.614470E-04 0.000000E+00
374 1.422580E-04 0.000000E+00
375 1.236260E-04 0.000000E+00
376 1.059620E-04 0.000000E+00
377 8.956970E-05 0.000000E+00
378 7.465790E-05 0.000000E+00
379 6.134480E-05 0.000000E+00
380 4.967160E-05 0.000000E+00
381 3.961540E-05 0.000000E+00
382 3.110180E-05 0.000000E+00
383 2.401920E-05 0.000000E+00
384 1.823040E-05 0.000000E+00
385 1.358420E-05 0.000000E+00
386 9.924440E-06 0.000000E+00
387 7.097790E-06 0.000000E+00
388 4.959500E-06 0.000000E+00
389 3.377490E-06 0.000000E+00
390 2.234920E-06 0.000000E+00
391 1.431250E-06 0.000000E+00
392 8.824160E-07 0.000000E+00
393 5.199920E-07 0.000000E+00
394 2.898420E-07 0.000000E+00
395 1.503850E-07 0.000000E+00
396 7.068940E-08 0.000000E+00
397 2.857200E-08 0.000000E+00
398 9.089140E-09 0.000000E+00
399 -1.280410E-09 0.000000E+00

View File

@ -0,0 +1,96 @@
0 -4.624365e-06 0.000000e+00
1 -8.258298e-05 0.000000e+00
2 -2.260141e-04 0.000000e+00
3 -2.539009e-04 0.000000e+00
4 7.665667e-07 0.000000e+00
5 3.050186e-04 0.000000e+00
6 1.712792e-04 0.000000e+00
7 -3.494469e-04 0.000000e+00
8 -4.491013e-04 0.000000e+00
9 2.631577e-04 0.000000e+00
10 7.897725e-04 0.000000e+00
11 3.857301e-05 0.000000e+00
12 -1.091783e-03 0.000000e+00
13 -5.999956e-04 0.000000e+00
14 1.206435e-03 0.000000e+00
15 1.397154e-03 0.000000e+00
16 -9.624677e-04 0.000000e+00
17 -2.313273e-03 0.000000e+00
18 2.078273e-04 0.000000e+00
19 3.130074e-03 0.000000e+00
20 1.137016e-03 0.000000e+00
21 -3.543348e-03 0.000000e+00
22 -3.024242e-03 0.000000e+00
23 3.207636e-03 0.000000e+00
24 5.238007e-03 0.000000e+00
25 -1.803839e-03 0.000000e+00
26 -7.375909e-03 0.000000e+00
27 -8.729728e-04 0.000000e+00
28 8.870910e-03 0.000000e+00
29 4.831847e-03 0.000000e+00
30 -9.042305e-03 0.000000e+00
31 -9.813905e-03 0.000000e+00
32 7.179136e-03 0.000000e+00
33 1.525300e-02 0.000000e+00
34 -2.628732e-03 0.000000e+00
35 -2.026759e-02 0.000000e+00
36 -5.142914e-03 0.000000e+00
37 2.366362e-02 0.000000e+00
38 1.657857e-02 0.000000e+00
39 -2.387548e-02 0.000000e+00
40 -3.227953e-02 0.000000e+00
41 1.860678e-02 0.000000e+00
42 5.394208e-02 0.000000e+00
43 -3.140518e-03 0.000000e+00
44 -8.849621e-02 0.000000e+00
45 -4.014856e-02 0.000000e+00
46 1.847636e-01 0.000000e+00
47 4.066011e-01 0.000000e+00
48 4.066011e-01 0.000000e+00
49 1.847636e-01 0.000000e+00
50 -4.014856e-02 0.000000e+00
51 -8.849621e-02 0.000000e+00
52 -3.140518e-03 0.000000e+00
53 5.394208e-02 0.000000e+00
54 1.860678e-02 0.000000e+00
55 -3.227953e-02 0.000000e+00
56 -2.387548e-02 0.000000e+00
57 1.657857e-02 0.000000e+00
58 2.366362e-02 0.000000e+00
59 -5.142914e-03 0.000000e+00
60 -2.026759e-02 0.000000e+00
61 -2.628732e-03 0.000000e+00
62 1.525300e-02 0.000000e+00
63 7.179136e-03 0.000000e+00
64 -9.813905e-03 0.000000e+00
65 -9.042305e-03 0.000000e+00
66 4.831847e-03 0.000000e+00
67 8.870910e-03 0.000000e+00
68 -8.729728e-04 0.000000e+00
69 -7.375909e-03 0.000000e+00
70 -1.803839e-03 0.000000e+00
71 5.238007e-03 0.000000e+00
72 3.207636e-03 0.000000e+00
73 -3.024242e-03 0.000000e+00
74 -3.543348e-03 0.000000e+00
75 1.137016e-03 0.000000e+00
76 3.130074e-03 0.000000e+00
77 2.078273e-04 0.000000e+00
78 -2.313273e-03 0.000000e+00
79 -9.624677e-04 0.000000e+00
80 1.397154e-03 0.000000e+00
81 1.206435e-03 0.000000e+00
82 -5.999956e-04 0.000000e+00
83 -1.091783e-03 0.000000e+00
84 3.857301e-05 0.000000e+00
85 7.897725e-04 0.000000e+00
86 2.631577e-04 0.000000e+00
87 -4.491013e-04 0.000000e+00
88 -3.494469e-04 0.000000e+00
89 1.712792e-04 0.000000e+00
90 3.050186e-04 0.000000e+00
91 7.665667e-07 0.000000e+00
92 -2.539009e-04 0.000000e+00
93 -2.260141e-04 0.000000e+00
94 -8.258298e-05 0.000000e+00
95 -4.624365e-06 0.000000e+00

View File

@ -0,0 +1,160 @@
0 4.032461e-05 0.000000e+00
1 7.453280e-05 0.000000e+00
2 1.234553e-04 0.000000e+00
3 1.701887e-04 0.000000e+00
4 1.973105e-04 0.000000e+00
5 1.854891e-04 0.000000e+00
6 1.193456e-04 0.000000e+00
7 -5.723101e-06 0.000000e+00
8 -1.779232e-04 0.000000e+00
9 -3.673259e-04 0.000000e+00
10 -5.295104e-04 0.000000e+00
11 -6.150085e-04 0.000000e+00
12 -5.832354e-04 0.000000e+00
13 -4.172837e-04 0.000000e+00
14 -1.349516e-04 0.000000e+00
15 2.083330e-04 0.000000e+00
16 5.277090e-04 0.000000e+00
17 7.281899e-04 0.000000e+00
18 7.312587e-04 0.000000e+00
19 5.019202e-04 0.000000e+00
20 6.783176e-05 0.000000e+00
21 -4.771493e-04 0.000000e+00
22 -9.891580e-04 0.000000e+00
23 -1.308918e-03 0.000000e+00
24 -1.307358e-03 0.000000e+00
25 -9.300168e-04 0.000000e+00
26 -2.262541e-04 0.000000e+00
27 6.483476e-04 0.000000e+00
28 1.461708e-03 0.000000e+00
29 1.963222e-03 0.000000e+00
30 1.956625e-03 0.000000e+00
31 1.367725e-03 0.000000e+00
32 2.854628e-04 0.000000e+00
33 -1.040387e-03 0.000000e+00
34 -2.250679e-03 0.000000e+00
35 -2.969069e-03 0.000000e+00
36 -2.912737e-03 0.000000e+00
37 -1.990583e-03 0.000000e+00
38 -3.573537e-04 0.000000e+00
39 1.598840e-03 0.000000e+00
40 3.340972e-03 0.000000e+00
41 4.323764e-03 0.000000e+00
42 4.155636e-03 0.000000e+00
43 2.736002e-03 0.000000e+00
44 3.234310e-04 0.000000e+00
45 -2.494752e-03 0.000000e+00
46 -4.934943e-03 0.000000e+00
47 -6.225197e-03 0.000000e+00
48 -5.836136e-03 0.000000e+00
49 -3.668966e-03 0.000000e+00
50 -1.394092e-04 0.000000e+00
51 3.880228e-03 0.000000e+00
52 7.261232e-03 0.000000e+00
53 8.919356e-03 0.000000e+00
54 8.140252e-03 0.000000e+00
55 4.837050e-03 0.000000e+00
56 -3.434785e-04 0.000000e+00
57 -6.115665e-03 0.000000e+00
58 -1.084778e-02 0.000000e+00
59 -1.299272e-02 0.000000e+00
60 -1.154995e-02 0.000000e+00
61 -6.430376e-03 0.000000e+00
62 1.391199e-03 0.000000e+00
63 1.000571e-02 0.000000e+00
64 1.698057e-02 0.000000e+00
65 1.997340e-02 0.000000e+00
66 1.740665e-02 0.000000e+00
67 9.029463e-03 0.000000e+00
68 -3.794969e-03 0.000000e+00
69 -1.818304e-02 0.000000e+00
70 -3.022295e-02 0.000000e+00
71 -3.578333e-02 0.000000e+00
72 -3.146898e-02 0.000000e+00
73 -1.550444e-02 0.000000e+00
74 1.167237e-02 0.000000e+00
75 4.726833e-02 0.000000e+00
76 8.650819e-02 0.000000e+00
77 1.234668e-01 0.000000e+00
78 1.521942e-01 0.000000e+00
79 1.678939e-01 0.000000e+00
80 1.678939e-01 0.000000e+00
81 1.521942e-01 0.000000e+00
82 1.234668e-01 0.000000e+00
83 8.650819e-02 0.000000e+00
84 4.726833e-02 0.000000e+00
85 1.167237e-02 0.000000e+00
86 -1.550444e-02 0.000000e+00
87 -3.146898e-02 0.000000e+00
88 -3.578333e-02 0.000000e+00
89 -3.022295e-02 0.000000e+00
90 -1.818304e-02 0.000000e+00
91 -3.794969e-03 0.000000e+00
92 9.029463e-03 0.000000e+00
93 1.740665e-02 0.000000e+00
94 1.997340e-02 0.000000e+00
95 1.698057e-02 0.000000e+00
96 1.000571e-02 0.000000e+00
97 1.391199e-03 0.000000e+00
98 -6.430376e-03 0.000000e+00
99 -1.154995e-02 0.000000e+00
100 -1.299272e-02 0.000000e+00
101 -1.084778e-02 0.000000e+00
102 -6.115665e-03 0.000000e+00
103 -3.434785e-04 0.000000e+00
104 4.837050e-03 0.000000e+00
105 8.140252e-03 0.000000e+00
106 8.919356e-03 0.000000e+00
107 7.261232e-03 0.000000e+00
108 3.880228e-03 0.000000e+00
109 -1.394092e-04 0.000000e+00
110 -3.668966e-03 0.000000e+00
111 -5.836136e-03 0.000000e+00
112 -6.225197e-03 0.000000e+00
113 -4.934943e-03 0.000000e+00
114 -2.494752e-03 0.000000e+00
115 3.234310e-04 0.000000e+00
116 2.736002e-03 0.000000e+00
117 4.155636e-03 0.000000e+00
118 4.323764e-03 0.000000e+00
119 3.340972e-03 0.000000e+00
120 1.598840e-03 0.000000e+00
121 -3.573537e-04 0.000000e+00
122 -1.990583e-03 0.000000e+00
123 -2.912737e-03 0.000000e+00
124 -2.969069e-03 0.000000e+00
125 -2.250679e-03 0.000000e+00
126 -1.040387e-03 0.000000e+00
127 2.854628e-04 0.000000e+00
128 1.367725e-03 0.000000e+00
129 1.956625e-03 0.000000e+00
130 1.963222e-03 0.000000e+00
131 1.461708e-03 0.000000e+00
132 6.483476e-04 0.000000e+00
133 -2.262541e-04 0.000000e+00
134 -9.300168e-04 0.000000e+00
135 -1.307358e-03 0.000000e+00
136 -1.308918e-03 0.000000e+00
137 -9.891580e-04 0.000000e+00
138 -4.771493e-04 0.000000e+00
139 6.783176e-05 0.000000e+00
140 5.019202e-04 0.000000e+00
141 7.312587e-04 0.000000e+00
142 7.281899e-04 0.000000e+00
143 5.277090e-04 0.000000e+00
144 2.083330e-04 0.000000e+00
145 -1.349516e-04 0.000000e+00
146 -4.172837e-04 0.000000e+00
147 -5.832354e-04 0.000000e+00
148 -6.150085e-04 0.000000e+00
149 -5.295104e-04 0.000000e+00
150 -3.673259e-04 0.000000e+00
151 -1.779232e-04 0.000000e+00
152 -5.723101e-06 0.000000e+00
153 1.193456e-04 0.000000e+00
154 1.854891e-04 0.000000e+00
155 1.973105e-04 0.000000e+00
156 1.701887e-04 0.000000e+00
157 1.234553e-04 0.000000e+00
158 7.453280e-05 0.000000e+00
159 4.032461e-05 0.000000e+00

View File

@ -0,0 +1,73 @@
# Begin data logger list
# Gain max.spfr mcld IIR(A,I)/FIR filter stages (not mandatory)
Ia: DigitizerModel="M24" M24-SC M24/BW
Ia: DigitizerModel="Q330" Q330/N Q330/HR Q330-SC
Ia: RecorderModel="M24" M24-SC M24/BW
Ia: RecorderModel="SeisComP" Q330-SC
Ia: RecorderModel="Q330" Q330/N Q330/HR
Ia: RecorderManufacturer="Quanterra" Q330/N Q330/HR
Ia: RecorderManufacturer="Lennartz" M24-SC M24/BW
Ia: RecorderManufacturer="Alpha2000" Q330-SC
Ia: DigitizerManufacturer="Quanterra" Q330/N Q330/HR Q330-SC
Ia: DigitizerManufacturer="Lennartz" M24-SC M24/BW
# Gain max.spfr mcld IIR(A,I)/FIR filter stages (not mandatory)
Dl: Q330/N 419430.0 100.0 0.0 Q330 200,100_1,50_2,40_3,20_4,1_5,0.1_5/10
Dl: Q330/HR 1677720.0 100.0 0.0 Q330 100_1,50_2,40_3,20_4,1_5,0.1_5/10
Dl: Q330-SC 419430.0 100.0 0.0 Q330 100_1,50_1/6,20_1/7,1_1/7/8/9,0.1_1/7/8/9/10
#
# End data logger list
# FIR filter list for Quanterra Q330 digitizer and Seiscomp recorder
# Name Sym ncf inrate fac delay corrtn gain frg
Ff: Q330_FIR_1 q330_b100_100 A 65 0 100.0 1 0.041607 0.041607 1.0 0.0
Ff: Q330_FIR_2 q330_b100_50 A 81 0 50.0 1 0.531607 0.531607 1.0 0.0
Ff: Q330_FIR_3 q330_b100_40 A 39 0 40.0 1 0.430462 0.430462 1.0 0.0
Ff: Q330_FIR_4 q330_b100_20 A 67 0 20.0 1 1.630462 1.630462 1.0 0.0
Ff: Q330_FIR_5 q330_b100_1 A 31 0 1.0 1 15.930462 15.930462 1.0 0.0
Ff: Q330_FIR_6 scp_deci2.1 C 48 0 100.0 2 0.000 0.0 1.0 0.0
Ff: Q330_FIR_7 scp_deci5.1 C 80 0 100.0 5 0.000 0.0 1.0 0.0
Ff: Q330_FIR_8 scp_deci2.1 C 48 0 20.0 2 0.000 0.0 1.0 0.0
Ff: Q330_FIR_9 scp_deci10.1 C 200 0 10.0 10 0.000 0.0 1.0 0.0
Ff: Q330_FIR_10 scp_deci10.1 C 200 0 1.0 10 0.000 0.0 4.0 0.0
# Digitizer IIR filter response list
#
# Digitizer analog response list
#
# Begin seismometer list
# Seismometer analog response list
# . Gain frgn Norm.fac fnr nz np Zeros&Poles
# Sensor type: VBB
Ia: Model="STS-2/CZ" STS-2/CZ
Ia: Model="STS-2/N" STS-2/N
Ia: Model="STS-2/G2" STS-2/G2
Ia: Model="STS-2/HG" STS-2/HG
Ia: Model="STS-2/G1" STS-2/G1
Ia: Model="STS-2/G3" STS-2/G3
Ia: Type="VBB" STS-2/CZ STS-2/N STS-2/G2 STS-2/HG STS-2/G3 STS-2/G1
Ia: Unit="M/S" STS-2/CZ STS-2/N STS-2/G2 STS-2/HG STS-2/G3 STS-2/G1
Ia: Manufacturer="Streckeisen" STS-2/CZ STS-2/N STS-2/G2 STS-2/HG STS-2/G3 STS-2/G1
Se: STS-2/N 1500.0 0.02 6.0077e7 1.0 2 5 2(0.0,0.0) (-0.037004,0.037016) (-0.037004,-0.037016) (-251.33,0.0) (-131.04,-467.29) (-131.04,467.29)
Se: STS-2/G1 1500.0 0.02 3.46844e17 1.0 5 9 2(0.0,0.0) (-15.15,0.0) (-318.6,401.2) (-318.6,-401.2) (-0.037,0.037) (-0.037,-0.037) (-15.99,0.0) (-100.9,401.9) (-100.9,-401.9) (-187.2,0.0) (-417.1,0.0) (-7454.0,7142.0) (-7454.0,-7142.0)
Se: STS-2/G2 1500.0 0.02 3.46844e17 1.0 9 14 2(0.0,0.0) (-10.75,0.0) (-294.6,0.0) (-555.1,0.0) (-683.9,175.5) (-683.9,-175.5) (-5907.0,3411.0) (-5907.0,-3411.0) (-0.037,0.037) (-0.037,-0.037) (-10.95,0.0) (-98.44,442.8) (-98.44,-442.8) (-251.1,0.0) (-556.8,60.0) (-556.8,-60.0) (-1391.0,0.0) (-4936.0,4713.0) (-4936.0,-4713.0) (-6227.0,0.0) (-6909.0,9208.0) (-6909.0,-9208.0)
Se: STS-2/G3 1500.0 0.02 3.46844e17 1.0 6 11 2(0.0,0.0) (-15.15,0.0) (-176.6,0.0) (-463.1,430.5) (-463.1,-430.5) (-0.037,0.037) (-0.037,-0.037) (-15.64,0.0) (-97.34,-400.7) (-97.34,400.7) (-255.1,0.0) (-374.8,0.0) (-520.3,0.0) (-10530.,10050.) (-10530.,-10050.) (-13300.,0.0)
#Streckeisen_STS-2/HG> 20000.0 0.02 3.46844e17 1.0 6 11 2(0.0,0.0) (-15.15,0.0) (-176.6,0.0) (-463.1,430.5) (-463.1,430.5) (-0.037,0.037) (-0.037,-0.037) (-15.64,0.0) (-97.34,-400.7) (-97.34,400.7) (-255.1,0.0) (-374.8,0.0) (-520.3,0.0) (-10530.,10050.) (-10530.,-10050.) (-13300.,0.0)
#Streckeisen_STS-2/CZ> 1500.0 1.0 4.47172e2 1.0 6 7 2(0.0,0.0) (-15.1488,0.0) (-199.554,0.0) (-461.814,429.079) (-461.814,-429.079) (-0.03702,0.03702) (-0.03702,-0.03702) (-15.2744,0.0) (-82.8124,409.852) (-82.8124,-409.852) (-443.314,0.0) (-454.526,0.0)
# End seismometer list

View File

@ -0,0 +1,291 @@
#!/usr/bin/env python
###############################################################################
# Copyright (C) 2020 Helmholtz-Zentrum Potsdam - Deutsches
# GeoForschungsZentrum GFZ
#
# License: GPL Affero General Public License (GNU AGPL) version 3.0
# Author: Peter L. Evans
# E-mail: <pevans@gfz-potsdam.de>
#
###############################################################################
from __future__ import print_function
from nettab.tab import Tab
import json
import os
import sys
import tempfile
import unittest
# Just to dump XML output??:
try:
import seiscomp.io as IO
except ImportError:
print('Failed to import seiscomp.io module, trying seiscomp3.IO instead')
from seiscomp3 import IO
# Just to examine the output XML:
import xml.etree.ElementTree as ET
def xmlparse(filename):
    '''Parse *filename* as SeisComP3 inventory XML.

    Returns a tuple ``(root, ns)`` where *root* is the document's root
    element and *ns* is the schema namespace in ElementTree's
    ``'{uri}'`` form, ready to be prepended to tag names.
    '''
    # ET.parse() reads and closes the file itself; the previous
    # hand-rolled XMLParser.feed(open(...).read()) pattern leaked the
    # file handle and its try/except merely re-raised.
    elem = ET.parse(filename).getroot()
    ns = '{http://geofon.gfz-potsdam.de/ns/seiscomp3-schema/0.11}'
    return (elem, ns)
class TestTab(unittest.TestCase):
    '''Exercise nettab's Tab class: digesting tab files, checking
    them, and generating SeisComP inventory objects and XML.
    '''

    # Minimal network description with a single station line.
    simpleTab = '''
Nw: QQ 2020-04-01
Na: Description="Atlantis Seismic Network"
Sl: AA01 "Pillars of Hercules/Atlantis" Q330/N%xxxx STS-2/N%yyyy 100/20 ZNE 30.0 -15.0 -900 2.0 2020-04-02
'''

    # Same network, plus a persistent identifier (PID) attribute.
    tabWithPid = '''
Nw: QQ 2020-04-01
Na: Description="Atlantis Seismic Network"
Na: Pid="doi:10.1234/xyz"
Sl: AA01 "Pillars of Hercules/Atlantis" Q330/N%xxxx STS-2/N%yyyy 100/20 ZNE 30.0 -15.0 -900 2.0 2020-04-02
'''

    # Instrument database used to resolve the Sl lines; must be
    # present in the working directory for the tests that digest it.
    instFile = 'small-inst.db'

    templateTab = '''
Nw: {nwline}
Na: {naline}
Sl: {slline}
'''

    def _writeTempTab(self, tabText):
        '''Put a nettab formatted string into a temporary file,
        returning the file name.
        '''
        # mode='w' is essential: print() writes str, and the default
        # NamedTemporaryFile mode ('w+b') rejects str on Python 3.
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as tab:
            print(tabText, file=tab)
        return tab.name

    def _writeInvXML(self, inv, filename='something.xml'):
        '''Write inventory *inv* to *filename* as formatted,
        uncompressed XML. Copied from tab2inv.py.
        '''
        ar = IO.XMLArchive()
        print("Generating file: %s" % filename,
              file=sys.stderr)
        ar.create(filename)
        ar.setFormattedOutput(True)
        ar.setCompression(False)
        ar.writeObject(inv)
        ar.close()

    def _writeNewInvXML(self, sc3inv, filename):
        '''Like _writeInvXML, but remove any pre-existing *filename* first.'''
        try:
            os.unlink(filename)
        except OSError:  # Python3: Catch FileNotFoundError instead.
            pass
        self._writeInvXML(sc3inv, filename)

    def test_1(self):
        '''Create object'''
        t = Tab()
        print('Expect: "Warning, not filter folder supplied."',
              file=sys.stderr)

    def test_2_filter(self):
        '''Provide a (trivial, non-useful) filter folder'''
        t = Tab(None, None, '.', None, None)

    def test_2_defaults_warning(self):
        '''Provide and load a defaults file'''
        # mode='w' so that print() of a str works on Python 3.
        defaults = tempfile.NamedTemporaryFile(mode='w', delete=False)
        print('''
Nw: QQ 2001/001
''', file=defaults)
        defaultsFile = defaults.name
        defaults.close()
        t = Tab(None, defaultsFile, '.', None, None)
        os.unlink(defaultsFile)
        print("Expect: 'Warning: Defaults file can only contain attributes'",
              file=sys.stderr)

    def test_2_defaults_attributes(self):
        '''Provide and load a defaults file'''
        defaults = tempfile.NamedTemporaryFile(mode='w', delete=False)
        print('''
Na: Foo=bar
Sa: StationFoo=bla * *
Ia: InstrumentFoo=blu *
''', file=defaults)
        defaultsFile = defaults.name
        defaults.close()
        t = Tab(None, defaultsFile, '.', None, None)
        os.unlink(defaultsFile)

    def test_3_digest(self):
        '''Digest a simple tab file without error.'''
        tabFile = self._writeTempTab(self.simpleTab)
        t = Tab(None, None, '.', None, None)
        t.digest(tabFile)
        os.unlink(tabFile)

    def SKIPtest_3_digest_check(self):
        '''Digest plus check(); skipped (needs a filters folder).'''
        tabFile = self._writeTempTab(self.simpleTab)
        t = Tab(None, None, 'filters', None, None)
        t.digest(tabFile)
        t.digest(self.instFile)
        t.check()
        os.unlink(tabFile)

    def test_4_digest_twice(self):
        '''Exception is raised by digesting twice.'''
        tabFile = self._writeTempTab(self.simpleTab)
        t = Tab(None, None, '.', None, None)
        t.digest(tabFile)
        with self.assertRaises(Exception):
            t.digest(tabFile)
        # print('Expect: "Warning: File {name} is already digested."')
        os.unlink(tabFile)

    def test_5_na_after_sa(self):
        '''Not allowed to provide Na lines after a Sl line'''
        s = '\n'.join([self.simpleTab, 'Na: Pid=10.123/xyz'])
        tabFile = self._writeTempTab(s)
        # Bug fix: the Tab object was previously never created, so
        # assertRaises passed on a NameError instead of the intended
        # digest error.
        t = Tab(None, None, '.', None, None)
        with self.assertRaises(Exception):
            t.digest(tabFile)
        # print('Expect "No Na lines after a Sl line.',
        #       'Network has already been defined."')
        os.unlink(tabFile)

    def test_6_network_pid(self):
        '''Key 'Pid' is an allowed network attribute'''
        tabString = '''
Nw: QQ 2001/001
Na: Region=Atlantis
Na: Pid=10.123/xyz
'''
        tabFile = self._writeTempTab(tabString)
        t = Tab(None, None, '.', None, None)
        t.digest(tabFile)
        os.unlink(tabFile)

    def test_6_network_pid_check(self):
        '''No problem to define extra unhandled attributes'''
        tabString = '''
Nw: QQ 2001/001
Na: Region=Atlantis
Na: Pid=10.123/xyz
Na: Foo=bar
'''
        tabFile = self._writeTempTab(tabString)
        t = Tab(None, None, '.', None, None)
        t.digest(tabFile)
        t.check()
        os.unlink(tabFile)

    def test_7_sc3Obj(self):
        '''Call sc3Obj with a trivial t'''
        t = Tab(None, None, '.', None, None)
        sc3inv = t.sc3Obj()

    def test_8_network_sc3Obj(self):
        '''Call sc3Obj with an actual network, write XML'''
        tabFile = self._writeTempTab(self.simpleTab)
        t = Tab(None, None, 'filters', None, None)
        t.digest(tabFile)
        t.digest(self.instFile)
        sc3inv = t.sc3Obj()
        # Returns ok, but reports inst.db errors and warnings to stdout.
        # (The old `assert('scinv is None')` here could never fail:
        # it asserted a truthy string literal.)
        self.assertTrue(sc3inv)
        outFile = '/tmp/testTabInv.xml'
        try:
            os.unlink(outFile)
        except OSError:  # Python3: Catch FileNotFoundError instead.
            pass
        self._writeInvXML(sc3inv, filename=outFile)
        self.assertTrue(os.path.exists(outFile))
        # Further checks: that the file contains a network, etc.

    def test_9_network_pid_sc3Obj(self):
        '''Load a network with PID, write XML, confirm PID is there.
        Older nettabs reported 'ignoring attribute Pid'.
        '''
        tabFile = self._writeTempTab(self.tabWithPid)
        t = Tab(None, None, 'filters', None, None)
        t.digest(tabFile)
        t.digest(self.instFile)
        sc3inv = t.sc3Obj()
        self.assertTrue(sc3inv)
        outFile = '/tmp/testTabInvPid.xml'
        self._writeNewInvXML(sc3inv, outFile)
        self.assertTrue(os.path.exists(outFile))
        # Check that the file contains exactly one network comment
        # which is a JSON string with PID.
        # e.g. '{"type": "DOI", "value": "10.1234/xsdfa"}'
        (elem, ns) = xmlparse(outFile)
        for e in elem:
            for f in e:
                if f.tag == ns + 'network':
                    g = f.findall(ns + 'comment')
                    self.assertTrue(len(g) == 1)
                    t = g[0].findall(ns + 'text')
                    j = json.loads(t[0].text)
                    self.assertEqual(j['type'], 'DOI')
                    self.assertEqual(j['value'], '10.1234/xyz')
                    # self.assertEqual(t[0].text, 'doi:10.1234/xyz')

    def test_10_network_comment(self):
        '''A network Comment attribute survives into the inventory XML.'''
        tabString = '''
Nw: NN 2020/092
Na: Region=Atlantis
Na: Comment="This is commentary"
Na: Remark="Remarkable!"
Sl: AA01 "Zeus" Q330/N%xxxx STS-2/N%yyyy 20 Z 30 -15 -2 2.0 2020/093
'''
        tabFile = self._writeTempTab(tabString)
        t = Tab(None, None, 'filters', None, None)
        t.digest(tabFile)
        t.digest(self.instFile)
        t.check()
        os.unlink(tabFile)
        sc3inv = t.sc3Obj()
        self.assertTrue(sc3inv)
        outFile = '/tmp/testTabInvComment.xml'
        self._writeNewInvXML(sc3inv, outFile)
        self.assertTrue(os.path.exists(outFile))
        # Further checks: that the file contains a network with PID. TODO
        (elem, ns) = xmlparse(outFile)
        for e in elem:
            for f in e:
                if f.tag == ns + 'network':
                    g = f.findall(ns + 'comment')
                    self.assertTrue(len(g) == 1)
                    # DEBUG print('DEBUG Network comment found:',
                    #       g[0].findall(ns + 'text')[0].text)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main(verbosity=1)