commit 906e9ccf6ba7c18008e27f5587a4a83cc67f4189 Author: Benjamin Banaskiewicz Date: Thu Apr 20 08:44:01 2023 +0000 [installation] Initial commit with first config diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..fa1d8ac --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +var +*.pyc +HYPO* +RESET* +share/maps diff --git a/bin/Hypo71PC b/bin/Hypo71PC new file mode 100755 index 0000000..6b12024 Binary files /dev/null and b/bin/Hypo71PC differ diff --git a/bin/arclink2inv b/bin/arclink2inv new file mode 100755 index 0000000..4ab50d6 --- /dev/null +++ b/bin/arclink2inv @@ -0,0 +1,82 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. # +############################################################################ + +import seiscomp.datamodel +import seiscomp.io +import getopt +import sys + + +usage = """arclink2inv [options] input=stdin output=stdout + +Options: + -h [ --help ] Produce help message + -f [ --formatted ] Enable formatted XML output +""" + + +def main(argv): + imp = seiscomp.io.Importer.Create("arclink") + if imp is None: + sys.stderr.write("Arclink import not available\n") + return 1 + + formatted = False + + # parse command line options + try: + opts, args = getopt.getopt(argv[1:], "hf", ["help", "formatted"]) + except getopt.error as msg: + sys.stderr.write("%s\n" % msg) + sys.stderr.write("for help use --help\n") + return 1 + + for o, a in opts: + if o in ["-h", "--help"]: + sys.stderr.write("%s\n" % usage) + return 1 + elif o in ["-f", "--formatted"]: + formatted = True + + argv = args + + if len(argv) > 0: + o = imp.read(argv[0]) + else: + o = imp.read("-") + + inv = seiscomp.datamodel.Inventory.Cast(o) + if inv is None: + sys.stderr.write("No inventory found\n") + return 1 + + ar = seiscomp.io.XMLArchive() + if len(argv) > 1: + res = ar.create(argv[1]) + else: + res = ar.create("-") + + if not res: + sys.stderr.write("Failed to open output\n") + return 1 + + ar.setFormattedOutput(formatted) + ar.writeObject(inv) + ar.close() + return 0 + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/bin/bindings2cfg b/bin/bindings2cfg new file mode 100755 index 0000000..bae2a58 --- /dev/null +++ b/bin/bindings2cfg @@ -0,0 +1,26 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- + +############################################################################ +# Copyright (C) gempa GmbH # +# All rights reserved. # +# Contact: gempa GmbH (seiscomp-dev@gempa.de) # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
# +# # +# Other Usage # +# Alternatively, this file may be used in accordance with the terms and # +# conditions contained in a signed written agreement between you and # +# gempa GmbH. # +############################################################################ + +import seiscomp.bindings2cfg +import sys + +sys.exit(seiscomp.bindings2cfg.main()) diff --git a/bin/dlsv2inv b/bin/dlsv2inv new file mode 100755 index 0000000..dd3be2f Binary files /dev/null and b/bin/dlsv2inv differ diff --git a/bin/ew2sc b/bin/ew2sc new file mode 100755 index 0000000..730f96a Binary files /dev/null and b/bin/ew2sc differ diff --git a/bin/extr_file b/bin/extr_file new file mode 100755 index 0000000..9cf416f --- /dev/null +++ b/bin/extr_file @@ -0,0 +1,28 @@ +#!/usr/bin/env seiscomp-python + +from __future__ import print_function +import sys +from seiscomp import mseedlite as mseed + +open_files = {} + +if len(sys.argv) != 2: + print("Usage: extr_file FILE") + sys.exit(1) + +for rec in mseed.Input(open(sys.argv[1], "rb")): + oname = "%s.%s.%s.%s" % (rec.sta, rec.net, rec.loc, rec.cha) + + if oname not in open_files: + postfix = ".D.%04d.%03d.%02d%02d" % (rec.begin_time.year, + rec.begin_time.timetuple()[7], rec.begin_time.hour, + rec.begin_time.minute) + + open_files[oname] = open(oname + postfix, "ab") + + ofile = open_files[oname] + ofile.write(rec.header + rec.data) + +for oname in open_files: + open_files[oname].close() + diff --git a/bin/fdsnws b/bin/fdsnws new file mode 100755 index 0000000..5cd594c --- /dev/null +++ b/bin/fdsnws @@ -0,0 +1,1482 @@ +#!/usr/bin/env seiscomp-python + +################################################################################ +# Copyright (C) 2013-2014 gempa GmbH +# +# FDSNWS -- Implements FDSN Web Service interface, see +# http://www.fdsn.org/webservices/ +# +# Implemented Services: +# fdsnws-dataselect +# fdsnws-event +# fdsnws-station +# fdsnws-availability +# +# Author: Stephan Herrnkind +# Email: herrnkind@gempa.de +############################################################################### + +from __future__ import absolute_import, division, print_function + +import base64 +import fnmatch +import os +import re +import signal +import sys +import time + +try: + from twisted.cred import checkers, credentials, error, portal + from twisted.internet import reactor, defer, task + from twisted.web import guard, static + from twisted.python import log, failure + from zope.interface import implementer +except ImportError as e: + sys.exit("%s\nIs python twisted installed?" 
% str(e)) + +import seiscomp.core +import seiscomp.datamodel +import seiscomp.io +import seiscomp.logging +import seiscomp.client +import seiscomp.system + + +from seiscomp.fdsnws.utils import isRestricted, py3ustr, py3bstr +from seiscomp.fdsnws.dataselect import FDSNDataSelect, FDSNDataSelectRealm, \ + FDSNDataSelectAuthRealm +from seiscomp.fdsnws.dataselect import VERSION as DataSelectVersion +from seiscomp.fdsnws.event import FDSNEvent +from seiscomp.fdsnws.event import VERSION as EventVersion +from seiscomp.fdsnws.station import FDSNStation +from seiscomp.fdsnws.station import VERSION as StationVersion +from seiscomp.fdsnws.availability import FDSNAvailabilityQuery, \ + FDSNAvailabilityQueryRealm, FDSNAvailabilityQueryAuthRealm, \ + FDSNAvailabilityExtent, FDSNAvailabilityExtentRealm, \ + FDSNAvailabilityExtentAuthRealm +from seiscomp.fdsnws.availability import VERSION as AvailabilityVersion +from seiscomp.fdsnws.http import DirectoryResource, ListingResource, \ + NoResource, Site, ServiceVersion, AuthResource, WADLFilter +from seiscomp.fdsnws.log import Log + + +def logSC3(entry): + try: + isError = entry['isError'] + msg = entry['message'] + if isError: + for l in msg: + seiscomp.logging.error("[reactor] %s" % l) + else: + for l in msg: + seiscomp.logging.info("[reactor] %s" % l) + except Exception: + pass + + +############################################################################### +# Make CORS work with queryauth +class HTTPAuthSessionWrapper(guard.HTTPAuthSessionWrapper): + def __init__(self, *args, **kwargs): + guard.HTTPAuthSessionWrapper.__init__(self, *args, **kwargs) + + def render(self, request): + if request.method == b'OPTIONS': + request.setHeader(b'Allow', b'GET,HEAD,POST,OPTIONS') + return b'' + + return guard.HTTPAuthSessionWrapper.render(self, request) + + +############################################################################### +@implementer(checkers.ICredentialsChecker) +class UsernamePasswordChecker(object): + + credentialInterfaces = (credentials.IUsernamePassword, + credentials.IUsernameHashedPassword) + + # ------------------------------------------------------------------------- + def __init__(self, userdb): + self.__userdb = userdb + + # ------------------------------------------------------------------------- + @staticmethod + def __cbPasswordMatch(matched, username): + if matched: + return username + + return failure.Failure(error.UnauthorizedLogin()) + + # ------------------------------------------------------------------------- + def requestAvatarId(self, cred): + return defer.maybeDeferred(self.__userdb.checkPassword, cred) \ + .addCallback(self.__cbPasswordMatch, cred.username) + + +############################################################################### +class UserDB(object): + + # ------------------------------------------------------------------------- + def __init__(self): + self.__users = {} + self.__blacklist = set() + task.LoopingCall(self.__expireUsers).start(60, False) + + # ------------------------------------------------------------------------- + def __expireUsers(self): + for (name, (_, _, expires)) in list(self.__users.items()): + if time.time() > expires: + seiscomp.logging.info("de-registering %s" % name) + del self.__users[name] + + # ------------------------------------------------------------------------- + def blacklistUser(self, name): + seiscomp.logging.info("blacklisting %s" % name) + self.__blacklist.add(name) + + # ------------------------------------------------------------------------- + def addUser(self, 
name, attributes, expires, data): + try: + password = self.__users[name][0] + + except KeyError: + bl = " (blacklisted)" if name in self.__blacklist else "" + seiscomp.logging.notice("registering %s%s %s" % (name, bl, data)) + password = base64.urlsafe_b64encode(os.urandom(12)) + + attributes['blacklisted'] = name in self.__blacklist + self.__users[name] = (password, attributes, expires) + return password + + # ------------------------------------------------------------------------- + def checkPassword(self, cred): + try: + pw = self.__users[cred.username][0] + + except KeyError: + return False + + return cred.checkPassword(pw) + + # ------------------------------------------------------------------------- + def getAttributes(self, name): + return self.__users[name][1] + + # ------------------------------------------------------------------------- + def dump(self): + seiscomp.logging.info("known users:") + + for name, user in list(self.__users.items()): + seiscomp.logging.info(" %s %s %d" % (py3ustr(name), + user[1], user[2])) + + +############################################################################### +class Access(object): + + # ------------------------------------------------------------------------- + def __init__(self): + self.__access = {} + + # ------------------------------------------------------------------------- + def initFromSC3Routing(self, routing): + for i in range(routing.accessCount()): + acc = routing.access(i) + net = acc.networkCode() + sta = acc.stationCode() + loc = acc.locationCode() + cha = acc.streamCode() + user = acc.user() + start = acc.start() + + try: + end = acc.end() + + except ValueError: + end = None + + self.__access.setdefault((net, sta, loc, cha), []) \ + .append((user, start, end)) + + # ------------------------------------------------------------------------- + @staticmethod + def __matchTime(t1, t2, accessStart, accessEnd): + return (not accessStart or (t1 and t1 >= accessStart)) and \ + (not accessEnd or (t2 and t2 <= accessEnd)) + + # ------------------------------------------------------------------------- + @staticmethod + def __matchEmail(emailAddress, accessUser): + defaultPrefix = "mail:" + + if accessUser.startswith(defaultPrefix): + accessUser = accessUser[len(defaultPrefix):] + + return emailAddress.upper() == accessUser.upper() or ( + accessUser[:1] == '@' and emailAddress[:1] != '@' and + emailAddress.upper().endswith(accessUser.upper())) + + # ------------------------------------------------------------------------- + @staticmethod + def __matchAttribute(attribute, accessUser): + return attribute.upper() == accessUser.upper() + + # ------------------------------------------------------------------------- + def authorize(self, user, net, sta, loc, cha, t1, t2): + if user['blacklisted']: + return False + + matchers = [] + + try: + # OID 0.9.2342.19200300.100.1.3 (RFC 2798) + emailAddress = user['mail'] + matchers.append((self.__matchEmail, emailAddress)) + + except KeyError: + pass + + try: + # B2ACCESS + for memberof in user['memberof'].split(';'): + matchers.append((self.__matchAttribute, "group:" + memberof)) + + except KeyError: + pass + + for m in matchers: + for (u, start, end) in self.__access.get((net, '', '', ''), []): + if self.__matchTime(t1, t2, start, end) and m[0](m[1], u): + return True + + for (u, start, end) in self.__access.get((net, sta, '', ''), []): + if self.__matchTime(t1, t2, start, end) and m[0](m[1], u): + return True + + for (u, start, end) in self.__access.get((net, sta, loc, cha), []): + if 
self.__matchTime(t1, t2, start, end) and m[0](m[1], u): + return True + + return False + + +############################################################################### +class DataAvailabilityCache(object): + + # ------------------------------------------------------------------------- + def __init__(self, app, da, validUntil): + self._da = da + self._validUntil = validUntil + self._extents = {} + self._extentsSorted = [] + self._extentsOID = {} + + for i in range(self._da.dataExtentCount()): + ext = self._da.dataExtent(i) + wid = ext.waveformID() + sid = "%s.%s.%s.%s" % (wid.networkCode(), wid.stationCode(), + wid.locationCode(), wid.channelCode()) + restricted = app._openStreams is None or sid not in app._openStreams + if restricted and not app._allowRestricted: + continue + self._extents[sid] = (ext, restricted) + # seiscomp.logging.debug("%s: %s ~ %s" % (sid, ext.start().iso(), + # ext.end().iso())) + + if app._serveAvailability: + # load data attribute extents if availability is served + for i in range(da.dataExtentCount()): + extent = da.dataExtent(i) + app.query().loadDataAttributeExtents(extent) + + # create a list of (extent, oid, restricted) tuples sorted by stream + self._extentsSorted = [(e, app.query().getCachedId(e), res) + for wid, (e, res) in sorted( + self._extents.items(), + key=lambda t: t[0])] + + # create a dictionary of object ID to extents + self._extentsOID = dict((oid, (e, res)) + for (e, oid, res) in self._extentsSorted) + + seiscomp.logging.info("loaded %i extents" % len(self._extents)) + + # ------------------------------------------------------------------------- + def validUntil(self): + return self._validUntil + + # ------------------------------------------------------------------------- + def extent(self, net, sta, loc, cha): + wid = "%s.%s.%s.%s" % (net, sta, loc, cha) + if wid in self._extents: + return self._extents[wid][0] + + return None + + # ------------------------------------------------------------------------- + def extents(self): + return self._extents + + # ------------------------------------------------------------------------- + def extentsSorted(self): + return self._extentsSorted + + # ------------------------------------------------------------------------- + def extentsOID(self): + return self._extentsOID + + # ------------------------------------------------------------------------- + def dataAvailability(self): + return self._da + + +############################################################################### +class FDSNWS(seiscomp.client.Application): + + # ------------------------------------------------------------------------- + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + self.setMessagingEnabled(True) + self.setDatabaseEnabled(True, True) + self.setRecordStreamEnabled(True) + self.setLoadInventoryEnabled(True) + + self._serverRoot = os.path.dirname(__file__) + self._listenAddress = '0.0.0.0' # all interfaces + self._port = 8080 + self._connections = 5 + self._queryObjects = 100000 # maximum number of objects per query + self._realtimeGap = None # minimum data age: 5min + self._samplesM = None # maximum number of samples per query + self._recordBulkSize = 102400 # desired record bulk size + self._htpasswd = '@CONFIGDIR@/fdsnws.htpasswd' + self._accessLogFile = '' + self._requestLogFile = '' + self._userSalt = '' + self._corsOrigins = ['*'] + + self._allowRestricted = True + self._useArclinkAccess = False + self._serveDataSelect = True + self._serveEvent = True + 
self._serveStation = True + self._serveAvailability = False + self._daEnabled = False + self._daCacheDuration = 300 + self._daCache = None + self._openStreams = None + self._daRepositoryName = 'primary' + self._daDCCName = 'DCC' + self._handleConditionalRequests = False + + self._hideAuthor = False + self._hideComments = False + self._evaluationMode = None + self._eventTypeWhitelist = None + self._eventTypeBlacklist = None + self._eventFormats = None + self._stationFilter = None + self._dataSelectFilter = None + self._debugFilter = False + + self._accessLog = None + + self._fileNamePrefix = 'fdsnws' + + self._trackdbEnabled = False + self._trackdbDefaultUser = 'fdsnws' + + self._authEnabled = False + self._authGnupgHome = '@ROOTDIR@/var/lib/gpg' + self._authBlacklist = [] + + self._userdb = UserDB() + self._access = None + self._checker = None + + self._requestLog = None + self.__reloadRequested = False + self.__timeInventoryLoaded = None + self.__tcpPort = None + + # Leave signal handling to us + seiscomp.client.Application.HandleSignals(False, False) + + # ------------------------------------------------------------------------- + def initConfiguration(self): + if not seiscomp.client.Application.initConfiguration(self): + return False + + # bind address and port + try: + self._listenAddress = self.configGetString('listenAddress') + except Exception: + pass + try: + self._port = self.configGetInt('port') + except Exception: + pass + + # maximum number of connections + try: + self._connections = self.configGetInt('connections') + except Exception: + pass + + # maximum number of objects per query, used in fdsnws-station and + # fdsnws-event to limit main memory consumption + try: + self._queryObjects = self.configGetInt('queryObjects') + except Exception: + pass + + # restrict end time of request to now-realtimeGap seconds, used in + # fdsnws-dataselect + try: + self._realtimeGap = self.configGetInt('realtimeGap') + except Exception: + pass + + # maximum number of samples (in units of million) per query, used in + # fdsnws-dataselect to limit bandwidth + try: + self._samplesM = self.configGetDouble('samplesM') + except Exception: + pass + + try: + self._recordBulkSize = self.configGetInt('recordBulkSize') + except Exception: + pass + + if self._recordBulkSize < 1: + print("Invalid recordBulkSize, must be larger than 0", + file=sys.stderr) + return False + + # location of htpasswd file + try: + self._htpasswd = self.configGetString('htpasswd') + except Exception: + pass + self._htpasswd = seiscomp.system.Environment.Instance() \ + .absolutePath(self._htpasswd) + + # location of access log file + try: + self._accessLogFile = seiscomp.system.Environment.Instance() \ + .absolutePath(self.configGetString('accessLog')) + except Exception: + pass + + # location of request log file + try: + self._requestLogFile = seiscomp.system.Environment.Instance() \ + .absolutePath(self.configGetString('requestLog')) + except Exception: + pass + + # user salt + try: + self._userSalt = self.configGetString('userSalt') + except Exception: + pass + + # list of allowed CORS origins + try: + self._corsOrigins = list(filter(None, + self.configGetStrings('corsOrigins'))) + except Exception: + pass + + # access to restricted inventory information + try: + self._allowRestricted = self.configGetBool('allowRestricted') + except Exception: + pass + + # time-based conditional requests handled by fdsnws-station + try: + self._handleConditionalRequests = \ + self.configGetBool('handleConditionalRequests') + except Exception: 
+ pass + + # use arclink-access bindings + try: + self._useArclinkAccess = self.configGetBool('useArclinkAccess') + except Exception: + pass + + # services to enable + try: + self._serveDataSelect = self.configGetBool('serveDataSelect') + except Exception: + pass + try: + self._serveEvent = self.configGetBool('serveEvent') + except Exception: + pass + try: + self._serveStation = self.configGetBool('serveStation') + except Exception: + pass + try: + self._serveAvailability = self.configGetBool('serveAvailability') + except Exception: + pass + + # data availability + try: + self._daEnabled = self.configGetBool('dataAvailability.enable') + except Exception: + pass + try: + self._daCacheDuration = self.configGetInt( + 'dataAvailability.cacheDuration') + except Exception: + pass + try: + self._daRepositoryName = self.configGetString( + 'dataAvailability.repositoryName') + except Exception: + pass + try: + self._daDCCName = self.configGetString('dataAvailability.dccName') + except Exception: + pass + + if self._serveAvailability and not self._daEnabled: + print("can't serve availabilty without dataAvailability.enable " + "set to true", file=sys.stderr) + return False + if not bool(re.match(r'^[a-zA-Z0-9_\ -]*$', self._daRepositoryName)): + print("invalid characters in dataAvailability.repositoryName", + file=sys.stderr) + return False + if not bool(re.match(r'^[a-zA-Z0-9_\ -]*$', self._daDCCName)): + print("invalid characters in dataAvailability.dccName", + file=sys.stderr) + return False + + # event filter + try: + self._hideAuthor = self.configGetBool('hideAuthor') + except Exception: + pass + try: + self._hideComments = self.configGetBool('hideComments') + except Exception: + pass + try: + name = self.configGetString('evaluationMode') + if name.lower() == seiscomp.datamodel.EEvaluationModeNames.name( + seiscomp.datamodel.MANUAL): + self._evaluationMode = seiscomp.datamodel.MANUAL + elif name.lower() == seiscomp.datamodel.EEvaluationModeNames.name( + seiscomp.datamodel.AUTOMATIC): + self._evaluationMode = seiscomp.datamodel.AUTOMATIC + else: + print("invalid evaluation mode string: %s" % name, + file=sys.stderr) + return False + except Exception: + pass + try: + strings = self.configGetStrings('eventType.whitelist') + if len(strings) > 1 or strings[0]: + try: + self._eventTypeWhitelist = self._parseEventTypes(strings) + except Exception as e: + print("error parsing eventType.whitelist: %s" % str(e), + file=sys.stderr) + return False + except Exception: + pass + try: + strings = self.configGetStrings('eventType.blacklist') + if len(strings) > 1 or strings[0]: + try: + self._eventTypeBlacklist = self._parseEventTypes(strings) + if self._eventTypeWhitelist: + lenBefore = len(self._eventTypeWhitelist) + diff = self._eventTypeWhitelist.difference( + self._eventTypeBlacklist) + overlapCount = lenBefore - len(diff) + if overlapCount > 0: + self._eventTypeWhitelist = diff + print("warning: found %i overlapping event " + "types in white and black list, black " + "list takes precedence" % overlapCount, + file=sys.stderr) + except Exception as e: + print("error parsing eventType.blacklist: %s" % str(e), + file=sys.stderr) + return False + except Exception: + pass + try: + strings = self.configGetStrings('eventFormats') + if len(strings) > 1 or strings[0]: + self._eventFormats = [s.lower() for s in strings] + except Exception: + pass + + # station filter + try: + self._stationFilter = seiscomp.system.Environment.Instance() \ + .absolutePath(self.configGetString('stationFilter')) + except Exception: + pass 
+ + # dataSelect filter + try: + self._dataSelectFilter = seiscomp.system.Environment.Instance() \ + .absolutePath(self.configGetString('dataSelectFilter')) + except Exception: + pass + + # output filter debug information + try: + self._debugFilter = self.configGetBool('debugFilter') + except Exception: + pass + + # prefix to be used as default for output filenames + try: + self._fileNamePrefix = self.configGetString('fileNamePrefix') + except Exception: + pass + + # save request logs in database? + try: + self._trackdbEnabled = self.configGetBool('trackdb.enable') + except Exception: + pass + + # default user + try: + self._trackdbDefaultUser = self.configGetString( + 'trackdb.defaultUser') + except Exception: + pass + + # enable authentication extension? + try: + self._authEnabled = self.configGetBool('auth.enable') + except Exception: + pass + + # GnuPG home directory + try: + self._authGnupgHome = self.configGetString('auth.gnupgHome') + except Exception: + pass + self._authGnupgHome = seiscomp.system.Environment.Instance() \ + .absolutePath(self._authGnupgHome) + + # blacklist of users/tokens + try: + strings = self.configGetStrings('auth.blacklist') + if len(strings) > 1 or strings[0]: + self._authBlacklist = strings + except Exception: + pass + + # If the database connection is passed via command line or + # configuration file then messaging is disabled. Messaging is only used + # to get the configured database connection URI. + if self.databaseURI() != "": + self.setMessagingEnabled(self._trackdbEnabled) + else: + # Without the event service, a database connection is not + # required if the inventory is loaded from file and no data + # availability information should be processed + if not self._serveEvent and not self._useArclinkAccess and \ + (not self._serveStation or \ + (not self.isInventoryDatabaseEnabled() and not self._daEnabled)): + self.setMessagingEnabled(self._trackdbEnabled) + self.setDatabaseEnabled(False, False) + + return True + + def printUsage(self): + + print('''Usage: + fdsnws [options] + +Provide FDSN Web Services''') + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +Execute on command line with debug output + fdsnws --debug +''') + + # ------------------------------------------------------------------------- + # Signal handling in Python and fork in wrapped C++ code is not a good + # combination. 
Without digging too much into the problem, forking the + # process with os.fork() helps + def forkProcess(self): + cp = os.fork() + if cp < 0: + return False + if cp == 0: + return True + + sys.exit(0) + return True + + # ------------------------------------------------------------------------- + def getDACache(self): + if not self._daEnabled: + return None + + now = seiscomp.core.Time.GMT() + # check if cache is still valid + if self._daCache is None or now > self._daCache.validUntil(): + + if self.query() is None: + seiscomp.logging.error('failed to connect to database') + return None + + da = seiscomp.datamodel.DataAvailability() + self.query().loadDataExtents(da) + validUntil = now + seiscomp.core.TimeSpan(self._daCacheDuration, 0) + self._daCache = DataAvailabilityCache(self, da, validUntil) + + return self._daCache + + # ------------------------------------------------------------------------- + @staticmethod + def _parseEventTypes(names): + types = set() + typeMap = {seiscomp.datamodel.EEventTypeNames.name(i): i + for i in range(seiscomp.datamodel.EEventTypeQuantity)} + for n in names: + name = n.lower().strip() + if name == "unknown": + types.add(-1) + else: + if name in typeMap: + types.add(typeMap[name]) + else: + raise Exception("event type name '%s' not supported" + % name) + + return types + + # ------------------------------------------------------------------------- + @staticmethod + def _formatEventTypes(types): + return ",".join(["unknown" if i < 0 else + seiscomp.datamodel.EEventTypeNames.name(i) + for i in sorted(types)]) + + # ------------------------------------------------------------------------- + def _site(self): + modeStr = None + if self._evaluationMode is not None: + modeStr = seiscomp.datamodel.EEvaluationModeNames.name(self._evaluationMode) + whitelistStr = "" + if self._eventTypeWhitelist is not None: + whitelistStr = ", ".join(["unknown" if i < 0 else + seiscomp.datamodel.EEventTypeNames.name(i) + for i in sorted(self._eventTypeWhitelist)]) + blacklistStr = "" + if self._eventTypeBlacklist is not None: + blacklistStr = ", ".join(["unknown" if i < 0 else + seiscomp.datamodel.EEventTypeNames.name(i) + for i in sorted(self._eventTypeBlacklist)]) + stationFilterStr = "" + if self._stationFilter is not None: + stationFilterStr = self._stationFilter + dataSelectFilterStr = "" + if self._dataSelectFilter is not None: + dataSelectFilterStr = self._dataSelectFilter + seiscomp.logging.debug(""" +configuration read: + serve + dataselect : {} + event : {} + station : {} + availability : {} + listenAddress : {} + port : {} + connections : {} + htpasswd : {} + accessLog : {} + CORS origins : {} + queryObjects : {} + realtimeGap : {} + samples (M) : {} + recordBulkSize : {} + allowRestricted : {} + handleConditionalRequests: {} + useArclinkAccess : {} + hideAuthor : {} + hideComments : {} + evaluationMode : {} + data availability + enabled : {} + cache duration : {} + repo name : {} + dcc name : {} + eventType + whitelist : {} + blacklist : {} + inventory filter + station : {} + dataSelect : {} + debug enabled : {} + trackdb + enabled : {} + defaultUser : {} + auth + enabled : {} + gnupgHome : {} + requestLog : {}""".format( \ + self._serveDataSelect, self._serveEvent, self._serveStation, + self._serveAvailability, self._listenAddress, self._port, + self._connections, self._htpasswd, self._accessLogFile, + self._corsOrigins, self._queryObjects, self._realtimeGap, + self._samplesM, self._recordBulkSize, self._allowRestricted, + self._handleConditionalRequests, 
self._useArclinkAccess, + self._hideAuthor, self._hideComments, modeStr, self._daEnabled, + self._daCacheDuration, self._daRepositoryName, self._daDCCName, + whitelistStr, blacklistStr, stationFilterStr, dataSelectFilterStr, + self._debugFilter, self._trackdbEnabled, self._trackdbDefaultUser, + self._authEnabled, self._authGnupgHome, self._requestLogFile)) + + if not self._serveDataSelect and not self._serveEvent and \ + not self._serveStation: + seiscomp.logging.error("all services disabled through configuration") + return None + + # access logger if requested + if self._accessLogFile: + self._accessLog = Log(self._accessLogFile) + + # load inventory needed by DataSelect and Station service + stationInv = dataSelectInv = None + if self._serveDataSelect or self._serveStation: + retn = False + stationInv = dataSelectInv = seiscomp.client.Inventory.Instance().inventory() + seiscomp.logging.info("inventory loaded") + + if self._serveDataSelect and self._serveStation: + # clone inventory if station and dataSelect filter are distinct + # else share inventory between both services + if self._stationFilter != self._dataSelectFilter: + dataSelectInv = self._cloneInventory(stationInv) + retn = self._filterInventory(stationInv, self._stationFilter, "station") and \ + self._filterInventory( + dataSelectInv, self._dataSelectFilter, "dataSelect") + else: + retn = self._filterInventory( + stationInv, self._stationFilter) + elif self._serveStation: + retn = self._filterInventory(stationInv, self._stationFilter) + else: + retn = self._filterInventory( + dataSelectInv, self._dataSelectFilter) + + self.__timeInventoryLoaded = seiscomp.core.Time.GMT() + + if not retn: + return None + + if self._authEnabled: + self._access = Access() + self._checker = UsernamePasswordChecker(self._userdb) + else: + self._access = Access() if self._useArclinkAccess else None + self._checker = checkers.FilePasswordDB(self._htpasswd, cache=True) + + if self._serveDataSelect and self._useArclinkAccess: + self._access.initFromSC3Routing(self.query().loadRouting()) + + seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False) + + shareDir = os.path.join(seiscomp.system.Environment.Instance().shareDir(), 'fdsnws') + + # Overwrite/set mime type of *.wadl and *.xml documents. Instead of + # using the official types defined in /etc/mime.types 'application/xml' + # is used as enforced by the FDSNWS spec. + static.File.contentTypes['.wadl'] = 'application/xml' + static.File.contentTypes['.xml'] = 'application/xml' + + # create resource tree /fdsnws/... 
+ root = ListingResource() + + fileName = os.path.join(shareDir, 'favicon.ico') + fileRes = static.File(fileName, 'image/x-icon') + fileRes.childNotFound = NoResource() + fileRes.isLeaf = True + root.putChild(b'favicon.ico', fileRes) + + prefix = ListingResource() + root.putChild(b'fdsnws', prefix) + + # dataselect + if self._serveDataSelect: + dataselect = ListingResource(DataSelectVersion) + prefix.putChild(b'dataselect', dataselect) + lstFile = os.path.join(shareDir, 'dataselect.html') + dataselect1 = DirectoryResource(lstFile, DataSelectVersion) + dataselect.putChild(b'1', dataselect1) + + # query + dataselect1.putChild(b'query', FDSNDataSelect( + dataSelectInv, self._recordBulkSize)) + + # queryauth + if self._authEnabled: + realm = FDSNDataSelectAuthRealm( + dataSelectInv, self._recordBulkSize, self._access, self._userdb) + else: + realm = FDSNDataSelectRealm( + dataSelectInv, self._recordBulkSize, self._access) + msg = 'authorization for restricted time series data required' + authSession = self._getAuthSessionWrapper(realm, msg) + dataselect1.putChild(b'queryauth', authSession) + + # version + dataselect1.putChild(b'version', ServiceVersion(DataSelectVersion)) + fileRes = static.File(os.path.join(shareDir, 'dataselect.wadl')) + fileRes.childNotFound = NoResource(DataSelectVersion) + + # application.wadl + dataselect1.putChild(b'application.wadl', fileRes) + + # builder + fileRes = static.File(os.path.join( + shareDir, 'dataselect-builder.html')) + fileRes.childNotFound = NoResource(DataSelectVersion) + dataselect1.putChild(b'builder', fileRes) + + if self._authEnabled: + dataselect1.putChild(b'auth', AuthResource( + DataSelectVersion, self._authGnupgHome, self._userdb)) + + # event + if self._serveEvent: + event = ListingResource(EventVersion) + prefix.putChild(b'event', event) + lstFile = os.path.join(shareDir, 'event.html') + event1 = DirectoryResource(lstFile, EventVersion) + event.putChild(b'1', event1) + + # query + event1.putChild(b'query', FDSNEvent( + self._hideAuthor, self._hideComments, self._evaluationMode, + self._eventTypeWhitelist, self._eventTypeBlacklist, + self._eventFormats)) + + # catalogs + fileRes = static.File(os.path.join(shareDir, 'catalogs.xml')) + fileRes.childNotFound = NoResource(EventVersion) + event1.putChild(b'catalogs', fileRes) + + # contributors + fileRes = static.File(os.path.join(shareDir, 'contributors.xml')) + fileRes.childNotFound = NoResource(EventVersion) + event1.putChild(b'contributors', fileRes) + + # version + event1.putChild(b'version', ServiceVersion(EventVersion)) + + # application.wadl + filterList = ['includecomments'] if self._hideComments else [] + try: + fileRes = WADLFilter(os.path.join(shareDir, 'event.wadl'), + filterList) + except Exception: + fileRes = NoResource(StationVersion) + event1.putChild(b'application.wadl', fileRes) + + # builder + fileRes = static.File(os.path.join(shareDir, 'event-builder.html')) + fileRes.childNotFound = NoResource(EventVersion) + event1.putChild(b'builder', fileRes) + + # station + if self._serveStation: + station = ListingResource(StationVersion) + prefix.putChild(b'station', station) + lstFile = os.path.join(shareDir, 'station.html') + station1 = DirectoryResource(lstFile, StationVersion) + station.putChild(b'1', station1) + + # query + station1.putChild(b'query', FDSNStation( + stationInv, self._allowRestricted, self._queryObjects, + self._daEnabled, self._handleConditionalRequests, + self.__timeInventoryLoaded)) + + # version + station1.putChild(b'version', ServiceVersion(StationVersion)) + 
+ # application.wadl + filterList = [] if self._daEnabled else ['matchtimeseries'] + try: + fileRes = WADLFilter(os.path.join(shareDir, 'station.wadl'), + filterList) + except Exception: + fileRes = NoResource(StationVersion) + station1.putChild(b'application.wadl', fileRes) + + # builder + fileRes = static.File(os.path.join(shareDir, 'station-builder.html')) + fileRes.childNotFound = NoResource(StationVersion) + station1.putChild(b'builder', fileRes) + + # availability + if self._serveAvailability: + + # create a set of waveformIDs which represent open channels + if self._serveDataSelect: + openStreams = set() + for iNet in range(dataSelectInv.networkCount()): + net = dataSelectInv.network(iNet) + if isRestricted(net): + continue + for iSta in range(net.stationCount()): + sta = net.station(iSta) + if isRestricted(sta): + continue + for iLoc in range(sta.sensorLocationCount()): + loc = sta.sensorLocation(iLoc) + for iCha in range(loc.streamCount()): + cha = loc.stream(iCha) + if isRestricted(cha): + continue + openStreams.add("{0}.{1}.{2}.{3}".format( + net.code(), sta.code(), loc.code(), cha.code())) + self._openStreams = openStreams + else: + self._openStreams = None + + availability = ListingResource(AvailabilityVersion) + prefix.putChild(b'availability', availability) + lstFile = os.path.join(shareDir, 'availability.html') + availability1 = DirectoryResource(lstFile, AvailabilityVersion) + availability.putChild(b'1', availability1) + + # query + availability1.putChild(b'query', FDSNAvailabilityQuery()) + + # queryauth + if self._authEnabled: + realm = FDSNAvailabilityQueryAuthRealm(self._access, + self._userdb) + else: + realm = FDSNAvailabilityQueryRealm(self._access) + msg = 'authorization for restricted availability segment data ' \ + 'required' + authSession = self._getAuthSessionWrapper(realm, msg) + availability1.putChild(b'queryauth', authSession) + + # extent + availability1.putChild(b'extent', FDSNAvailabilityExtent()) + + # extentauth + if self._authEnabled: + realm = FDSNAvailabilityExtentAuthRealm(self._access, + self._userdb) + else: + realm = FDSNAvailabilityExtentRealm(self._access) + msg = 'authorization for restricted availability extent data ' \ + 'required' + authSession = self._getAuthSessionWrapper(realm, msg) + availability1.putChild(b'extentauth', authSession) + + # version + availability1.putChild( + b'version', ServiceVersion(AvailabilityVersion)) + + # application.wadl + fileRes = static.File(os.path.join(shareDir, 'availability.wadl')) + fileRes.childNotFound = NoResource(AvailabilityVersion) + availability1.putChild(b'application.wadl', fileRes) + + # builder-query + fileRes = static.File(os.path.join( + shareDir, 'availability-builder-query.html')) + fileRes.childNotFound = NoResource(AvailabilityVersion) + availability1.putChild(b'builder-query', fileRes) + + # builder-extent + fileRes = static.File(os.path.join( + shareDir, 'availability-builder-extent.html')) + fileRes.childNotFound = NoResource(AvailabilityVersion) + availability1.putChild(b'builder-extent', fileRes) + + # static files + fileRes = static.File(os.path.join(shareDir, 'js')) + fileRes.childNotFound = NoResource() + fileRes.hideInListing = True + prefix.putChild(b'js', fileRes) + + fileRes = static.File(os.path.join(shareDir, 'css')) + fileRes.childNotFound = NoResource() + fileRes.hideInListing = True + prefix.putChild(b'css', fileRes) + + return Site(root, self._corsOrigins) + + # ------------------------------------------------------------------------- + def _reloadTask(self): + if not 
self.__reloadRequested: + return + + seiscomp.logging.info("reloading inventory") + self.reloadInventory() + + site = self._site() + + if site: + self.__tcpPort.factory = site + + # remove reload file + try: + reloadfile = os.path.join( + seiscomp.system.Environment.Instance().installDir(), + 'var', 'run', '{}.reload'.format(self.name())) + if os.path.isfile(reloadfile): + os.remove(reloadfile) + except Exception as e: + seiscomp.logging.warning( + "error processing reload file: {}".format(e)) + + seiscomp.logging.info("reload successful") + + else: + seiscomp.logging.info("reload failed") + + self._userdb.dump() + self.__reloadRequested = False + + # ------------------------------------------------------------------------- + def _sighupHandler(self, signum, frame): #pylint: disable=W0613 + if self.__reloadRequested: + seiscomp.logging.info("SIGHUP received, reload already in progress") + else: + seiscomp.logging.info("SIGHUP received, reload scheduled") + self.__reloadRequested = True + + # ------------------------------------------------------------------------- + def run(self): + retn = False + try: + # request logger if requested + self._requestLog = None + if self._requestLogFile: + # import here, so we don't depend on GeoIP if request log is not + # needed + from seiscomp.fdsnws.reqlog import RequestLog # pylint: disable=C0415 + self._requestLog = RequestLog(self._requestLogFile, self._userSalt) + + for user in self._authBlacklist: + self._userdb.blacklistUser(user) + + site = self._site() + + if not site: + return False + + # start listen for incoming request + self.__tcpPort = reactor.listenTCP(self._port, + site, + self._connections, + self._listenAddress) + + # setup signal handler + signal.signal(signal.SIGHUP, self._sighupHandler) + task.LoopingCall(self._reloadTask).start(1, False) + + # start processing + seiscomp.logging.info("start listening") + log.addObserver(logSC3) + + reactor.run() + retn = True + except Exception as e: + seiscomp.logging.error(str(e)) + + return retn + + # ------------------------------------------------------------------------- + @staticmethod + def _cloneInventory(inv): + wasEnabled = seiscomp.datamodel.PublicObject.IsRegistrationEnabled() + seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False) + inv2 = seiscomp.datamodel.Inventory.Cast(inv.clone()) + + for iNet in range(inv.networkCount()): + net = inv.network(iNet) + net2 = seiscomp.datamodel.Network.Cast(net.clone()) + inv2.add(net2) + + for iSta in range(net.stationCount()): + sta = net.station(iSta) + sta2 = seiscomp.datamodel.Station.Cast(sta.clone()) + net2.add(sta2) + + for iLoc in range(sta.sensorLocationCount()): + loc = sta.sensorLocation(iLoc) + loc2 = seiscomp.datamodel.SensorLocation.Cast(loc.clone()) + sta2.add(loc2) + + for iCha in range(loc.streamCount()): + cha = loc.stream(iCha) + cha2 = seiscomp.datamodel.Stream.Cast(cha.clone()) + loc2.add(cha2) + + seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled) + return inv2 + + # ------------------------------------------------------------------------- + def _filterInventory(self, inv, fileName, serviceName=""): + if not fileName: + return True + + class FilterRule: + def __init__(self, name, code): + self.name = name + self.exclude = name.startswith("!") + self.code = code + + self.restricted = None + self.shared = None + self.netClass = None + self.archive = None + + # read filter configuration from INI file + invFilter = [] + includeRuleDefined = False + try: + # pylint: disable=C0415 + if sys.version_info[0] < 
3: + from ConfigParser import ConfigParser + from ConfigParser import Error as CPError + else: + from configparser import ConfigParser + from configparser import Error as CPError + except ImportError: + seiscomp.logging.error("could not load 'ConfigParser' Python module") + return False + + cp = ConfigParser() + + try: + seiscomp.logging.notice("reading inventory filter file: %s" % fileName) + fp = open(fileName, 'r') + if sys.version_info < (3, 2): + cp.readfp(fp) # pylint: disable=W1505 + else: + cp.read_file(fp, fileName) + + if len(cp.sections()) == 0: + return True + + # check for mandatory code attribute + for sectionName in cp.sections(): + code = "" + try: + code = cp.get(sectionName, "code") + except CPError: + seiscomp.logging.error( + "missing 'code' attribute in section {} of inventory " + "filter file {}".format(sectionName, fileName)) + return False + + rule = FilterRule(sectionName, str(code)) + + try: + rule.restricted = cp.getboolean(sectionName, 'restricted') + except CPError: + pass + + try: + rule.shared = cp.getboolean(sectionName, 'shared') + except CPError: + pass + + try: + rule.netClass = str(cp.get(sectionName, 'netClass')) + except CPError: + pass + + try: + rule.archive = str(cp.get(sectionName, 'archive')) + except CPError: + pass + + includeRuleDefined |= not rule.exclude + invFilter.append(rule) + + except Exception as e: + seiscomp.logging.error( + "could not read inventory filter file %s: %s" % (fileName, str(e))) + return False + + # apply filter + # networks + if self._debugFilter: + debugLines = [] + delNet = delSta = delLoc = delCha = 0 + iNet = 0 + while iNet < inv.networkCount(): + net = inv.network(iNet) + + try: + netRestricted = net.restricted() + except ValueError: + netRestricted = None + try: + netShared = net.shared() + except ValueError: + netShared = None + + # stations + iSta = 0 + while iSta < net.stationCount(): + sta = net.station(iSta) + staCode = "%s.%s" % (net.code(), sta.code()) + + try: + staRestricted = sta.restricted() + except ValueError: + staRestricted = None + try: + staShared = sta.shared() + except ValueError: + staShared = None + + # sensor locations + iLoc = 0 + while iLoc < sta.sensorLocationCount(): + loc = sta.sensorLocation(iLoc) + locCode = "%s.%s" % (staCode, loc.code()) + + # channels + iCha = 0 + while iCha < loc.streamCount(): + cha = loc.stream(iCha) + code = "%s.%s" % (locCode, cha.code()) + + # evaluate rules until matching code is found + match = False + for rule in invFilter: + # code + if not fnmatch.fnmatchcase(code, rule.code): + continue + + # restricted + if rule.restricted is not None: + try: + if cha.restricted() != rule.restricted: + continue + except ValueError: + if staRestricted is not None: + if staRestricted != rule.restricted: + continue + elif netRestricted is None or \ + netRestricted != rule.restricted: + continue + + # shared + if rule.shared is not None: + try: + if cha.shared() != rule.shared: + continue + except ValueError: + if staShared is not None: + if staShared != rule.shared: + continue + elif netShared is None or \ + netShared != rule.shared: + continue + + # netClass + if rule.netClass is not None and \ + net.netClass() != rule.netClass: + continue + + # archive + if rule.archive is not None and \ + net.archive() != rule.archive: + continue + + # the rule matched + match = True + break + + if (match and rule.exclude) or \ + (not match and includeRuleDefined): + loc.removeStream(iCha) + delCha += 1 + reason = "no matching include rule" + if match: + reason = "'%s'" % rule.name + if 
self._debugFilter: + debugLines.append( + "%s [-]: %s" % (code, reason)) + else: + iCha += 1 + reason = "no matching exclude rule" + if match: + reason = "'%s'" % rule.name + if self._debugFilter: + debugLines.append( + "%s [+]: %s" % (code, reason)) + + # remove empty sensor locations + if loc.streamCount() == 0: + sta.removeSensorLocation(iLoc) + delLoc += 1 + else: + iLoc += 1 + + # remove empty stations + if sta.sensorLocationCount() == 0: + delSta += 1 + net.removeStation(iSta) + else: + iSta += 1 + + # remove empty networks + if net.stationCount() == 0: + delNet += 1 + inv.removeNetwork(iNet) + else: + iNet += 1 + + if serviceName: + serviceName += ": " + seiscomp.logging.debug( + "%sremoved %i networks, %i stations, %i locations, %i streams" % ( + serviceName, delNet, delSta, delLoc, delCha)) + if self._debugFilter: + debugLines.sort() + seiscomp.logging.notice("%sfilter decisions based on file %s:\n%s" % ( + serviceName, fileName, str("\n".join(debugLines)))) + + return True + + # ------------------------------------------------------------------------- + def _getAuthSessionWrapper(self, realm, msg): + p = portal.Portal(realm, [self._checker]) + f = guard.DigestCredentialFactory('MD5', msg) + f.digest = credentials.DigestCredentialFactory('MD5', py3bstr(msg)) + return HTTPAuthSessionWrapper(p, [f]) + + +fdsnwsApp = FDSNWS(len(sys.argv), sys.argv) +sys.exit(fdsnwsApp()) + + +# vim: ts=4 et tw=79 diff --git a/bin/fdsnxml2inv b/bin/fdsnxml2inv new file mode 100755 index 0000000..3aecdc4 Binary files /dev/null and b/bin/fdsnxml2inv differ diff --git a/bin/import_inv b/bin/import_inv new file mode 100755 index 0000000..3b963d7 --- /dev/null +++ b/bin/import_inv @@ -0,0 +1,134 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
# +############################################################################ + +import sys +import os +import subprocess +import glob +import seiscomp.client + + +class Importer(seiscomp.client.Application): + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + + self.setMessagingEnabled(False) + self.setDatabaseEnabled(False, False) + + self._args = argv[1:] + + def run(self): + if len(self._args) == 0: + sys.stderr.write( + "Usage: import_inv [{format}|help] [output]\n") + return False + + if self._args[0] == "help": + if len(self._args) < 2: + sys.stderr.write("'help' can only be used with 'formats'\n") + sys.stderr.write("import_inv help formats\n") + return False + + if self._args[1] == "formats": + return self.printFormats() + + sys.stderr.write("unknown topic '%s'\n" % self._args[1]) + return False + + fmt = self._args[0] + try: + prog = os.path.join( + os.environ['SEISCOMP_ROOT'], "bin", "%s2inv" % fmt) + except: + sys.stderr.write( + "Could not get SeisComP root path, SEISCOMP_ROOT not set?\n") + return False + + if not os.path.exists(prog): + sys.stderr.write("Format '%s' is not supported\n" % fmt) + return False + + if len(self._args) < 2: + sys.stderr.write("Input missing\n") + return False + + input = self._args[1] + + if len(self._args) < 3: + filename = os.path.basename(os.path.abspath(input)) + if not filename: + filename = fmt + + # Append .xml if the ending is not already .xml + if filename[-4:] != ".xml": + filename = filename + ".xml" + storage_dir = os.path.join( + os.environ['SEISCOMP_ROOT'], "etc", "inventory") + output = os.path.join(storage_dir, filename) + try: + os.makedirs(storage_dir) + except: + pass + sys.stderr.write("Generating output to %s\n" % output) + else: + output = self._args[2] + + proc = subprocess.Popen([prog, input, output], + stdout=None, stderr=None, shell=False) + chans = proc.communicate() + if proc.returncode != 0: + sys.stderr.write( + "Conversion failed, return code: %d\n" % proc.returncode) + return False + + return True + + def printFormats(self): + try: + path = os.path.join(os.environ['SEISCOMP_ROOT'], "bin", "*2inv") + except: + sys.stderr.write( + "Could not get SeisComP root path, SEISCOMP_ROOT not set?\n") + return False + + files = glob.glob(path) + for f in files: + prog = os.path.basename(f) + prog = prog[:prog.find("2inv")] + sys.stdout.write("%s\n" % prog) + + return True + + def printUsage(self): + + print('''Usage: + import_inv [FORMAT] input [output] + import_inv help [topic] + +Import inventory information from various sources.''') + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +List all supported inventory formats + import_inv help formats + +Convert from FDSN stationXML to SeisComp format + import_inv fdsnxml inventory_fdsnws.xml inventory_sc.xml +''') + +if __name__ == "__main__": + app = Importer(len(sys.argv), sys.argv) + sys.exit(app()) diff --git a/bin/instdb2db2 b/bin/instdb2db2 new file mode 100755 index 0000000..51bde2e --- /dev/null +++ b/bin/instdb2db2 @@ -0,0 +1,278 @@ +#!/usr/bin/env seiscomp-python + +from __future__ import print_function +import sys, os +import csv +from optparse import OptionParser + +def quote(instr): + return '"'+instr+'"' + +class base(object): + def __init__(self, filename, fields): + self.att = {} + fd = open(filename) + try: + try: + fieldNames = None + for row in csv.DictReader(fd, fieldNames): + id = row['id'] + if id in self.att: + print("multiple %s found in %s" % (id, filename)) + continue + + for key in fields: 
+ if not row[key]: + del(row[key]) + + del row['id'] + + try: + row['low_freq'] = float(row['low_freq']) + except KeyError: + pass + + try: + row['high_freq'] = float(row['high_freq']) + except KeyError: + pass + + self.att[id] = row + + except KeyError as e: + raise Exception("column %s missing in %s" % (str(e), filename)) + + except (TypeError, ValueError) as e: + raise Exception("error reading %s: %s" % (filename, str(e))) + + finally: + fd.close() + + def keys(self): + return list(self.att.keys()) + + def screname(self, what): + nc = "" + nu = True + for c in what: + if c == '_': + nu = True + continue + if nu: + nc += c.upper() + nu = False + else: + nc += c + + if nc == 'LowFreq': nc = 'LowFrequency' + if nc == 'HighFreq': nc = 'HighFrequency' + + return nc + + def reorder(self): + att = {} + if not self.att: + return None + + for (code, row) in self.att.items(): + for (k, v) in row.items(): + k = self.screname(k) + try: + dk = att[k] + except: + dk = {} + att[k] = dk + + try: + dv = dk[str(v)] + except: + dv = [] + dk[str(v)] = dv + + dv.append(code) + return att + + def dump(self, fdo): + att = self.reorder() + lastK=None + + for (k, v) in att.items(): + if not lastK: lastK = k + if lastK != k: + fdo.write("\n") + for (kv, ids) in v.items(): + fdo.write("Ia: %s=%s" % (k,quote(kv))) + for id in ids: + fdo.write(" %s" % id) + fdo.write("\n") + fdo.write("\n") + +class sensorAttributes(base): + def __init__(self, filename): + base.__init__(self, filename, ['id', 'type','unit', 'low_freq', 'high_freq', 'model', 'manufacturer', 'remark']) + +class dataloggerAttributes(base): + def __init__(self, filename): + base.__init__(self, filename, ['id', 'digitizer_model', 'digitizer_manufacturer', 'recorder_model', 'recorder_manufacturer', 'clock_model', 'clock_manufacturer', 'clock_type', 'remark']) + +class INST(object): + def cleanID(self, id): + nc = "" + for c in id: + nc += c + if c == '_': + nc = "" + + return nc + + def __init__(self, filename, attS, attD): + self.filename = filename + self.sensorA = sensorAttributes(attS) + self.dataloggerA = dataloggerAttributes(attD) + lines = [] + f = open(filename) + for line in f: + line = line.strip() + if not line or line[0] == '#': + # Add comments line types + lines.append({ 'content': line, 'type': 'C', 'id': None}) + else: + (id, line) = line.split(">", 1) + id = id.strip() + line = line.strip() + # Add undefined line types + lines.append({ 'content': line, 'type': 'U', 'id': id}) + f.close() + self.lines = lines + self._filltypes() + + def _filltypes(self): + for line in self.lines: + if line['type'] != 'U': continue + id = line['id'] + if id.find('_FIR_') != -1: + line['type'] = 'F' + elif id.find('Sngl-gain_') != -1: + line['type'] = 'L' + line['id'] = self.cleanID(id) + elif id.find('_digipaz_') != -1: + line['type'] = 'P' + elif id.find('_iirpaz_') != -1: + line['type'] = 'I' + + for line in self.lines: + if line['type'] != 'U': continue + id = self.cleanID(line['id']) + + if id in list(self.sensorA.keys()): + line['type'] = 'S' + line['id'] = id + elif id in list(self.dataloggerA.keys()): + line['type'] = 'D' + line['id'] = id + # Those we are forcing ! 
+ elif id in ['OSIRIS-SC', 'Gaia', 'LE24', 'MALI', 'PSS', 'FDL', 'CMG-SAM', 'CMG-DCM', 'EDAS-24', 'SANIAC']: + line['id'] = id + line['type'] = 'D' + elif id in ['Trillium-Compact', 'Reftek-151/120', 'BBVS-60', 'CMG-3ESP/60F', 'LE-1D/1', 'L4-3D/BW', 'S13', 'GS13', 'SH-1', 'MP', 'MARKL22', 'CM-3', 'CMG-6T', 'SM-6/BW']: + line['id'] = id + line['type'] = 'S' + + for line in self.lines: + if line['type'] == 'U': + print("'"+self.cleanID(line['id'])+"', ", end=' ') + + def dump(self, fdo): + sa = False + da = False + + dataloggerFieldSize = 0 + sensorFieldSize = 0 + for line in self.lines: + if line['type'] == 'C': continue + if line['type'] == 'S': + if len(line['id']) > sensorFieldSize: + sensorFieldSize = len(line['id']) + if line['type'] == 'D': + if len(line['id']) > dataloggerFieldSize: + dataloggerFieldSize = len(line['id']) + + seLine = "Se: %%%ss %%s\n" % (-1*(sensorFieldSize+1)) + dtLine = "Dl: %%%ss %%s\n" % (-1*(dataloggerFieldSize+1)) + for line in self.lines: + if line['type'] == 'C': + fdo.write(line['content'] + "\n") + continue + + if line['type'] == 'S': + if not sa: + self.sensorA.dump(fdo) + sa = True + fdo.write(seLine % (line['id'], line['content'])) + continue + + if line['type'] == 'D': + if not da: + self.dataloggerA.dump(fdo) + da = True + fdo.write(dtLine % (line['id'], line['content'])) + continue + + if line['type'] == 'L': + fdo.write("Cl: %s %s\n" % (line['id'], line['content'])) + continue + + if line['type'] == 'F': + fdo.write("Ff: %s %s\n" % (line['id'], line['content'])) + continue + + if line['type'] == 'P': + fdo.write("Pz: %s %s\n" % (line['id'], line['content'])) + continue + + + if line['type'] == 'I': + fdo.write("If: %s %s\n" % (line['id'], line['content'])) + continue + +def main(): + + parser = OptionParser(usage="Old tab to New tab converter", version="1.0", add_help_option=True) + + parser.add_option("", "--sat", type="string", + help="Indicates the sensor attribute file to use", dest="sat", default="sensor_attr.csv") + parser.add_option("", "--dat", type="string", + help="Indicates the station attribute file to use", dest="dat", default="datalogger_attr.csv") + parser.add_option("-c", "--clean", action="store_true", + help="Remove the comments and blank lines", dest="cleanFile", default=False) + + # Parsing & Error check + (options, args) = parser.parse_args() + errors = [] + + if len(args) != 1: + errors.append("need an Input filename") + + if not os.path.isfile(options.sat): + errors.append("sensor attribute file '%s' not found." % options.sat) + + if not os.path.isfile(options.dat): + errors.append("datalogger attribute file '%s' not found." % options.dat) + + if len(args) == 2 and os.path.isfile(args[1]): + errors.append("output file already exists, will not overwrite.") + + if errors: + print("Found error while processing the command line:", file=sys.stderr) + for error in errors: + print(" %s" % error, file=sys.stderr) + return 1 + + inputName = args[0] + i= INST(inputName, options.sat, options.dat) + fdo = sys.stdout if len(args) < 2 else open(args[1],"w") + i.dump(fdo) + fdo.close() + +if __name__ == "__main__": + main() diff --git a/bin/inv2dlsv b/bin/inv2dlsv new file mode 100755 index 0000000..343907e --- /dev/null +++ b/bin/inv2dlsv @@ -0,0 +1,98 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. 
# +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. # +############################################################################ + +from __future__ import (absolute_import, division, print_function, unicode_literals) + +import sys +import io +from seiscomp.legacy.fseed import * +from seiscomp.legacy.db.seiscomp3 import sc3wrap +from seiscomp.legacy.db.seiscomp3.inventory import Inventory +import seiscomp.datamodel, seiscomp.io + +ORGANIZATION = "EIDA" + + +def iterinv(obj): + return (j for i in obj.values() for j in i.values()) + + +def main(): + if len(sys.argv) < 1 or len(sys.argv) > 3: + sys.stderr.write("Usage inv2dlsv [in_xml [out_dataless]]\n") + return 1 + + if len(sys.argv) > 1: + inFile = sys.argv[1] + else: + inFile = "-" + + if len(sys.argv) > 2: + out = sys.argv[2] + else: + out = "" + + sc3wrap.dbQuery = None + + ar = seiscomp.io.XMLArchive() + if ar.open(inFile) == False: + raise IOError(inFile + ": unable to open") + + obj = ar.readObject() + if obj is None: + raise TypeError(inFile + ": invalid format") + + sc3inv = seiscomp.datamodel.Inventory.Cast(obj) + if sc3inv is None: + raise TypeError(inFile + ": invalid format") + + inv = Inventory(sc3inv) + inv.load_stations("*", "*", "*", "*") + inv.load_instruments() + + vol = SEEDVolume(inv, ORGANIZATION, "", resp_dict=False) + + for net in iterinv(inv.network): + for sta in iterinv(net.station): + for loc in iterinv(sta.sensorLocation): + for strm in iterinv(loc.stream): + try: + vol.add_chan(net.code, sta.code, loc.code, + strm.code, strm.start, strm.end) + + except SEEDError as e: + sys.stderr.write("Error (%s,%s,%s,%s): %s\n" % ( + net.code, sta.code, loc.code, strm.code, str(e))) + + if not out or out == "-": + output = io.BytesIO() + vol.output(output) + stdout = sys.stdout.buffer if hasattr(sys.stdout, "buffer") else sys.stdout + stdout.write(output.getvalue()) + stdout.flush() + output.close() + else: + with open(sys.argv[2], "wb") as fd: + vol.output(fd) + + return 0 + + +if __name__ == "__main__": + try: + sys.exit(main()) + except Exception as e: + sys.stderr.write("Error: %s" % str(e)) + sys.exit(1) diff --git a/bin/invextr b/bin/invextr new file mode 100755 index 0000000..dae9a07 Binary files /dev/null and b/bin/invextr differ diff --git a/bin/load_timetable b/bin/load_timetable new file mode 100755 index 0000000..a4800a7 Binary files /dev/null and b/bin/load_timetable differ diff --git a/bin/msrtsimul b/bin/msrtsimul new file mode 100755 index 0000000..f1bcaff --- /dev/null +++ b/bin/msrtsimul @@ -0,0 +1,280 @@ +#!/usr/bin/env seiscomp-python + +from __future__ import absolute_import, division, print_function + +import sys +import os +import time +import datetime +import calendar +import stat + +from getopt import getopt, GetoptError +from seiscomp import mseedlite as mseed + + +#------------------------------------------------------------------------------ +def read_mseed_with_delays(delaydict, reciterable): + """ + Create an iterator which takes into account configurable realistic delays. + + This function creates an iterator which returns one miniseed record at a time. 
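+    Each item is yielded as a (delayed end time, record) tuple, which
+    rt_simul() unpacks again before replaying the record.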
+ Artificial delays can be introduced by using delaydict. + + This function can be used to make simulations in real time more realistic + when e.g. some stations have a much higher delay than others due to + narrow bandwidth communication channels etc. + + A delaydict has the following data structure: + keys: XX.ABC (XX: network code, ABC: station code). The key "default" is + a special value for the default delay. + values: Delay to be introduced in seconds + + This function will rearrange the iterable object which has been used as + input for rt_simul() so that it can again be used by rt_simul but taking + artificial delays into account. + """ + import heapq #pylint: disable=C0415 + + heap = [] + min_delay = 0 + default_delay = 0 + if 'default' in delaydict: + default_delay = delaydict['default'] + for rec in reciterable: + rec_time = calendar.timegm(rec.end_time.timetuple()) + delay_time = rec_time + stationname = "%s.%s" % (rec.net, rec.sta) + if stationname in delaydict: + delay_time = rec_time + delaydict[stationname] + else: + delay_time = rec_time + default_delay + heapq.heappush(heap, (delay_time, rec)) + toprectime = heap[0][0] + if toprectime - min_delay < rec_time: + topelement = heapq.heappop(heap) + yield topelement + while heap: + topelement = heapq.heappop(heap) + yield topelement + + +#------------------------------------------------------------------------------ +def rt_simul(f, speed=1., jump=0., delaydict=None): + """ + Iterator to simulate "real-time" MSeed input + + At startup, the first MSeed record is read. The following records are + read in pseudo-real-time relative to the time of the first record, + resulting in data flowing at realistic speed. This is useful e.g. for + demonstrating real-time processing using real data of past events. + + The data in the input file may be multiplexed, but *must* be sorted by + time, e.g. using 'mssort'. + """ + rtime = time.time() + etime = None + skipping = True + record_iterable = mseed.Input(f) + if delaydict: + record_iterable = read_mseed_with_delays(delaydict, record_iterable) + for rec in record_iterable: + if delaydict: + rec_time = rec[0] + rec = rec[1] + else: + rec_time = calendar.timegm(rec.end_time.timetuple()) + if etime is None: + etime = rec_time + + if skipping: + if (rec_time - etime) / 60.0 < jump: + continue + + etime = rec_time + skipping = False + + tmax = etime + speed * (time.time() - rtime) + ms = 1000000.0 * (rec.nsamp / rec.fsamp) + last_sample_time = rec.begin_time + datetime.timedelta(microseconds=ms) + last_sample_time = calendar.timegm(last_sample_time.timetuple()) + if last_sample_time > tmax: + time.sleep((last_sample_time - tmax + 0.001) / speed) + yield rec + + +#------------------------------------------------------------------------------ +def usage(): + print('''Usage: + msrtsimul [options] file + +MiniSEED real time playback and simulation + +msrtsimul reads sorted (and possibly multiplexed) MiniSEED files and writes +individual records in pseudo-real-time. This is useful e.g. for testing and +simulating data acquisition. Output is +$SEISCOMP_ROOT/var/run/seedlink/mseedfifo unless --seedlink or -c is used. + + +Options: + -c, --stdout write on standard output + -d, --delays add artificial delays + -s, --speed speed factor (float) + -j, --jump minutes to skip (float) + --test test mode + -m --mode choose between 'realtime' and 'historic' + --seedlink choose the seedlink module name. Useful if a seedlink + alias or non-standard names are used. 
Replaces 'seedlink' + in the standard mseedfifo path. + -v, --verbose verbose mode + -h, --help display this help message + +Examples: +Play back miniSEED waveforms in real time with verbose output + msrtsimul -v miniSEED-file +''') + + +#------------------------------------------------------------------------------ +def main(): + py2 = sys.version_info < (3,) + + ifile = sys.stdin if py2 else sys.stdin.buffer + verbosity = 0 + speed = 1. + jump = 0. + test = False + seedlink = 'seedlink' + mode = 'realtime' + setSystemTime = False + + try: + opts, args = getopt(sys.argv[1:], "cd:s:j:vhm:", + ["stdout", "delays=", "speed=", "jump=", "test", + "verbose", "help", "mode=", "seedlink="]) + except GetoptError: + usage() + return 1 + + out_channel = None + delays = None + + for flag, arg in opts: + if flag in ("-c", "--stdout"): + out_channel = sys.stdout if py2 else sys.stdout.buffer + elif flag in ("-d", "--delays"): + delays = arg + elif flag in ("-s", "--speed"): + speed = float(arg) + elif flag in ("-j", "--jump"): + jump = float(arg) + elif flag in ("-m", "--mode"): + mode = arg + elif flag == "--seedlink": + seedlink = arg + elif flag in ("-v", "--verbose"): + verbosity += 1 + elif flag == "--test": + test = True + else: + usage() + if flag in ("-h", "--help"): + return 0 + return 1 + + if len(args) == 1: + if args[0] != "-": + try: + ifile = open(args[0], "rb") + except IOError as e: + print("could not open input file '{}' for reading: {}" \ + .format(args[0], e), file=sys.stderr) + sys.exit(1) + elif len(args) != 0: + usage() + return 1 + + if out_channel is None: + try: + sc_root = os.environ["SEISCOMP_ROOT"] + except KeyError: + print("SEISCOMP_ROOT environment variable is not set", file=sys.stderr) + sys.exit(1) + + mseed_fifo = os.path.join(sc_root, "var", "run", seedlink, "mseedfifo") + if verbosity: + print("output data to %s" % mseed_fifo, file=sys.stderr) + + if not os.path.exists(mseed_fifo): + print("""\ +ERROR: {} does not exist. +In order to push the records to SeedLink, it needs to run and must be configured for real-time playback. +""".format(mseed_fifo), file=sys.stderr) + sys.exit(1) + + if not stat.S_ISFIFO(os.stat(mseed_fifo).st_mode): + print("""\ +ERROR: {} is not a named pipe +Check if SeedLink is running and configured for real-time playback. 
+""".format(mseed_fifo), file=sys.stderr) + sys.exit(1) + + try: + out_channel = open(mseed_fifo, "wb") + except Exception as e: + print(str(e), file=sys.stderr) + sys.exit(1) + + try: + delaydict = None + if delays: + delaydict = dict() + try: + f = open(delays, 'r') + for line in f: + content = line.split(':') + if len(content) != 2: + raise Exception("Could not parse a line in file %s: %s\n" % (delays, line)) + delaydict[content[0].strip()] = float(content[1].strip()) + except Exception as e: + print("Error reading delay file {}: {}".format(delays, e), + file=sys.stderr) + + inp = rt_simul(ifile, speed=speed, jump=jump, delaydict=delaydict) + stime = time.time() + + time_diff = None + print("Starting msrtsimul at {}".format(datetime.datetime.utcnow()), file=sys.stderr) + for rec in inp: + if rec.size != 512: + print("Skipping record of {}.{}.{}.{} starting on {}: length != 512 Bytes: ".format(rec.net, rec.sta, rec.loc, rec.cha, str(rec.begin_time)), file=sys.stderr) + continue + if time_diff is None: + ms = 1000000.0 * (rec.nsamp / rec.fsamp) + time_diff = datetime.datetime.utcnow() - rec.begin_time - \ + datetime.timedelta(microseconds=ms) + if mode == 'realtime': + rec.begin_time += time_diff + + if verbosity: + print("%s_%s %7.2f %s %7.2f" % \ + (rec.net, rec.sta, (time.time() - stime), str(rec.begin_time), + time.time() - calendar.timegm(rec.begin_time.timetuple())), + file=sys.stderr) + + if not test: + rec.write(out_channel, 9) + out_channel.flush() + + except KeyboardInterrupt: + pass + except Exception as e: + print("Exception: {}".format(str(e)), file=sys.stderr) + return 1 + + return 0 + + +#------------------------------------------------------------------------------ +if __name__ == "__main__": + sys.exit(main()) diff --git a/bin/ql2sc b/bin/ql2sc new file mode 100755 index 0000000..dbab169 Binary files /dev/null and b/bin/ql2sc differ diff --git a/bin/run_with_lock b/bin/run_with_lock new file mode 100755 index 0000000..9c0e4b9 Binary files /dev/null and b/bin/run_with_lock differ diff --git a/bin/sc2pa b/bin/sc2pa new file mode 100755 index 0000000..87db810 --- /dev/null +++ b/bin/sc2pa @@ -0,0 +1,217 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
# +############################################################################ + +import time +import sys +import os +import time +import seiscomp.core, seiscomp.client, seiscomp.datamodel, seiscomp.logging +from seiscomp.scbulletin import Bulletin, stationCount + + +class ProcAlert(seiscomp.client.Application): + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + + self.setMessagingEnabled(True) + self.setDatabaseEnabled(True, True) + + self.setAutoApplyNotifierEnabled(True) + self.setInterpretNotifierEnabled(True) + + self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP) + self.addMessagingSubscription("EVENT") + self.addMessagingSubscription("LOCATION") + self.addMessagingSubscription("MAGNITUDE") + + self.maxAgeDays = 1. + self.minPickCount = 25 + + self.procAlertScript = "" + + ep = seiscomp.datamodel.EventParameters() + + def createCommandLineDescription(self): + try: + self.commandline().addGroup("Publishing") + self.commandline().addIntOption("Publishing", "min-arr", + "Minimum arrival count of a published origin", self.minPickCount) + self.commandline().addDoubleOption("Publishing", "max-age", + "Maximum age in days of published origins", self.maxAgeDays) + self.commandline().addStringOption("Publishing", "procalert-script", + "Specify the script to publish an event. The ProcAlert file and the event id are passed as parameter $1 and $2") + self.commandline().addOption("Publishing", "test", + "Test mode, no messages are sent") + except: + seiscomp.logging.warning( + "caught unexpected error %s" % sys.exc_info()) + + def initConfiguration(self): + if not seiscomp.client.Application.initConfiguration(self): + return False + + try: + self.procAlertScript = self.configGetString("scripts.procAlert") + except: + pass + + try: + self.minPickCount = self.configGetInt("minArrivals") + except: + pass + + try: + self.maxAgeDays = self.configGetDouble("maxAgeDays") + except: + pass + + return True + + def init(self): + if not seiscomp.client.Application.init(self): + return False + + try: + self.procAlertScript = self.commandline().optionString("procalert-script") + except: + pass + + try: + self.minPickCount = self.commandline().optionInt("min-arr") + except: + pass + + try: + self.maxAgeDays = self.commandline().optionDouble("max-age") + except: + pass + + self.bulletin = Bulletin(self.query(), "autoloc1") + self.cache = seiscomp.datamodel.PublicObjectRingBuffer( + self.query(), 100) + + if not self.procAlertScript: + seiscomp.logging.warning("No procalert script given") + else: + seiscomp.logging.info( + "Using procalert script: %s" % self.procAlertScript) + + return True + + def addObject(self, parentID, obj): + org = seiscomp.datamodel.Origin.Cast(obj) + if org: + self.cache.feed(org) + seiscomp.logging.info("Received origin %s" % org.publicID()) + return + + self.updateObject(parentID, obj) + + def updateObject(self, parentID, obj): + try: + evt = seiscomp.datamodel.Event.Cast(obj) + if evt: + orid = evt.preferredOriginID() + + org = self.cache.get(seiscomp.datamodel.Origin, orid) + if not org: + seiscomp.logging.error("Unable to fetch origin %s" % orid) + return + + if org.arrivalCount() == 0: + self.query().loadArrivals(org) + if org.stationMagnitudeCount() == 0: + self.query().loadStationMagnitudes(org) + if org.magnitudeCount() == 0: + self.query().loadMagnitudes(org) + + if not self.originMeetsCriteria(org, evt): + seiscomp.logging.warning("Origin %s not published" % orid) + return + + txt = self.bulletin.printEvent(evt) + + 
for line in txt.split("\n"): + line = line.rstrip() + seiscomp.logging.info(line) + seiscomp.logging.info("") + + if not self.commandline().hasOption("test"): + self.send_procalert(txt, evt.publicID()) + + return + + except: + sys.stderr.write("%s\n" % sys.exc_info()) + + def hasValidNetworkMagnitude(self, org, evt): + nmag = org.magnitudeCount() + for imag in range(nmag): + mag = org.magnitude(imag) + if mag.publicID() == evt.preferredMagnitudeID(): + return True + return False + + def send_procalert(self, txt, evid): + if self.procAlertScript: + tmp = "/tmp/yyy%s" % evid.replace("/", "_").replace(":", "-") + f = file(tmp, "w") + f.write("%s" % txt) + f.close() + + os.system(self.procAlertScript + " " + tmp + " " + evid) + + def coordinates(self, org): + return org.latitude().value(), org.longitude().value(), org.depth().value() + + def originMeetsCriteria(self, org, evt): + publish = True + + lat, lon, dep = self.coordinates(org) + + if 43 < lat < 70 and -10 < lon < 60 and dep > 200: + seiscomp.logging.error("suspicious region/depth - ignored") + publish = False + + if stationCount(org) < self.minPickCount: + seiscomp.logging.error("too few picks - ignored") + publish = False + + now = seiscomp.core.Time.GMT() + if (now - org.time().value()).seconds()/86400. > self.maxAgeDays: + seiscomp.logging.error("origin too old - ignored") + publish = False + + try: + if org.evaluationMode() == seiscomp.datamodel.MANUAL: + publish = True + except: + pass + + try: + if org.evaluationStatus() == seiscomp.datamodel.CONFIRMED: + publish = True + except: + pass + + if not self.hasValidNetworkMagnitude(org, evt): + seiscomp.logging.error("no network magnitude - ignored") + publish = False + + return publish + + +app = ProcAlert(len(sys.argv), sys.argv) +sys.exit(app()) diff --git a/bin/sc32inv b/bin/sc32inv new file mode 120000 index 0000000..7d624b2 --- /dev/null +++ b/bin/sc32inv @@ -0,0 +1 @@ +scml2inv \ No newline at end of file diff --git a/bin/scalert b/bin/scalert new file mode 100755 index 0000000..46923be --- /dev/null +++ b/bin/scalert @@ -0,0 +1,717 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
# +############################################################################ + +import os +import sys +import re +import subprocess +import traceback +import seiscomp.core, seiscomp.client, seiscomp.datamodel, seiscomp.math +import seiscomp.logging, seiscomp.seismology, seiscomp.system + + +class ObjectAlert(seiscomp.client.Application): + + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + + self.setMessagingEnabled(True) + self.setDatabaseEnabled(True, True) + self.setLoadRegionsEnabled(True) + self.setMessagingUsername("") + self.setPrimaryMessagingGroup( + seiscomp.client.Protocol.LISTENER_GROUP) + self.addMessagingSubscription("EVENT") + self.addMessagingSubscription("LOCATION") + self.addMessagingSubscription("MAGNITUDE") + + self.setAutoApplyNotifierEnabled(True) + self.setInterpretNotifierEnabled(True) + + self.setLoadCitiesEnabled(True) + self.setLoadRegionsEnabled(True) + + self._ampType = "snr" + self._citiesMaxDist = 20 + self._citiesMinPopulation = 50000 + + self._eventDescriptionPattern = None + self._pickScript = None + self._ampScript = None + self._alertScript = None + self._eventScript = None + + self._pickProc = None + self._ampProc = None + self._alertProc = None + self._eventProc = None + + self._newWhenFirstSeen = False + self._oldEvents = [] + self._agencyIDs = [] + self._phaseHints = [] + self._phaseStreams = [] + self._phaseNumber = 1 + self._phaseInterval = 1 + + def createCommandLineDescription(self): + self.commandline().addOption("Generic", "first-new", + "calls an event a new event when it is seen the first time") + self.commandline().addGroup("Alert") + self.commandline().addStringOption("Alert", "amp-type", + "amplitude type to listen to", self._ampType) + self.commandline().addStringOption("Alert", "pick-script", + "script to be called when a pick arrived, network-, station code pick publicID are passed as parameters $1, $2, $3 and $4") + self.commandline().addStringOption("Alert", "amp-script", + "script to be called when a station amplitude arrived, network-, station code, amplitude and amplitude publicID are passed as parameters $1, $2, $3 and $4") + self.commandline().addStringOption("Alert", "alert-script", + "script to be called when a preliminary origin arrived, latitude and longitude are passed as parameters $1 and $2") + self.commandline().addStringOption("Alert", "event-script", + "script to be called when an event has been declared; the message string, a flag (1=new event, 0=update event), the EventID, the arrival count and the magnitude (optional when set) are passed as parameter $1, $2, $3, $4 and $5") + self.commandline().addGroup("Cities") + self.commandline().addStringOption("Cities", "max-dist", + "maximum distance for using the distance from a city to the earthquake") + self.commandline().addStringOption("Cities", "min-population", + "minimum population for a city to become a point of interest") + self.commandline().addGroup("Debug") + self.commandline().addStringOption("Debug", "eventid,E", "specify Event ID") + return True + + def init(self): + if not seiscomp.client.Application.init(self): + return False + + foundScript = False + # module configuration paramters + try: + self._newWhenFirstSeen = self.configGetBool("firstNew") + except: + pass + + try: + self._agencyIDs = [self.configGetString("agencyID")] + except: + pass + + try: + agencyIDs = self.configGetStrings("agencyIDs") + self._agencyIDs = [] + for item in agencyIDs: + item = item.strip() + if item not in self._agencyIDs: + 
self._agencyIDs.append(item) + except: + pass + + + self._phaseHints = ['P','S'] + try: + phaseHints = self.configGetStrings("constraints.phaseHints") + self._phaseHints = [] + for item in phaseHints: + item = item.strip() + if item not in self._phaseHints: + self._phaseHints.append(item) + except: + pass + + self._phaseStreams = [] + try: + phaseStreams = self.configGetStrings("constraints.phaseStreams") + for item in phaseStreams: + rule = item.strip() + # rule is NET.STA.LOC.CHA and the special charactes ? * | ( ) are allowed + if not re.fullmatch(r'[A-Z|a-z|0-9|\?|\*|\||\(|\)|\.]+', rule): + seiscomp.logging.error("Wrong stream ID format in `constraints.phaseStreams`: %s" % item) + return False + # convert rule to a valid regular expression + rule = re.sub(r'\.', r'\.', rule) + rule = re.sub(r'\?', '.' , rule) + rule = re.sub(r'\*' , '.*' , rule) + if rule not in self._phaseStreams: + self._phaseStreams.append(rule) + except: + pass + + try: + self._phaseNumber = self.configGetInt("constraints.phaseNumber") + except: + pass + + try: + self._phaseInterval = self.configGetInt("constraints.phaseInterval") + except: + pass + + if self._phaseNumber > 1: + self._pickCache = seiscomp.datamodel.PublicObjectTimeSpanBuffer() + self._pickCache.setTimeSpan(seiscomp.core.TimeSpan(self._phaseInterval)) + self.enableTimer(1) + + try: + self._eventDescriptionPattern = self.configGetString("poi.message") + except: + pass + + try: + self._citiesMaxDist = self.configGetDouble("poi.maxDist") + except: + pass + + try: + self._citiesMinPopulation = self.configGetInt("poi.minPopulation") + except: + pass + + # mostly command-line options + try: + self._citiesMaxDist = self.commandline().optionDouble("max-dist") + except: + pass + + try: + if self.commandline().hasOption("first-new"): + self._newWhenFirstSeen = True + except: + pass + + try: + self._citiesMinPopulation = self.commandline().optionInt("min-population") + except: + pass + + try: + self._ampType = self.commandline().optionString("amp-type") + except: + pass + + try: + self._pickScript = self.commandline().optionString("pick-script") + except: + try: + self._pickScript = self.configGetString("scripts.pick") + except: + seiscomp.logging.warning("No pick script defined") + + if self._pickScript: + self._pickScript = seiscomp.system.Environment.Instance().absolutePath(self._pickScript) + seiscomp.logging.info("Using pick script %s" % self._pickScript) + + if not os.path.isfile(self._pickScript): + seiscomp.logging.error(" + not exising") + return False + + if not os.access(self._pickScript, os.X_OK): + seiscomp.logging.error(" + not executable") + return False + + foundScript = True + + try: + self._ampScript = self.commandline().optionString("amp-script") + except: + try: + self._ampScript = self.configGetString("scripts.amplitude") + except: + seiscomp.logging.warning("No amplitude script defined") + + if self._ampScript: + self._ampScript = seiscomp.system.Environment.Instance().absolutePath(self._ampScript) + seiscomp.logging.info("Using amplitude script %s" % self._ampScript) + + if not os.path.isfile(self._ampScript): + seiscomp.logging.error(" + not exising") + return False + + if not os.access(self._ampScript, os.X_OK): + seiscomp.logging.error(" + not executable") + return False + + foundScript = True + + try: + self._alertScript = self.commandline().optionString("alert-script") + except: + try: + self._alertScript = self.configGetString("scripts.alert") + except: + seiscomp.logging.warning("No alert script defined") + + if 
self._alertScript: + self._alertScript = seiscomp.system.Environment.Instance( + ).absolutePath(self._alertScript) + seiscomp.logging.info("Using alert script %s" % self._alertScript) + + if not os.path.isfile(self._alertScript): + seiscomp.logging.error(" + not exising") + return False + + if not os.access(self._alertScript, os.X_OK): + seiscomp.logging.error(" + not executable") + return False + + foundScript = True + + try: + self._eventScript = self.commandline().optionString("event-script") + except: + try: + self._eventScript = self.configGetString("scripts.event") + except: + seiscomp.logging.warning("No event script defined") + + if self._eventScript: + self._eventScript = seiscomp.system.Environment.Instance( + ).absolutePath(self._eventScript) + seiscomp.logging.info("Using event script %s" % self._eventScript) + + if not os.path.isfile(self._eventScript): + seiscomp.logging.error(" + not exising") + return False + + if not os.access(self._eventScript, os.X_OK): + seiscomp.logging.error(" + not executable") + return False + + foundScript = True + + if not foundScript: + seiscomp.logging.error("Found no valid script in configuration") + return False + + seiscomp.logging.info("Creating ringbuffer for 100 objects") + if not self.query(): + seiscomp.logging.warning( + "No valid database interface to read from") + self._cache = seiscomp.datamodel.PublicObjectRingBuffer( + self.query(), 100) + + if self._ampScript and self.connection(): + seiscomp.logging.info( + "Amplitude script defined: subscribing to AMPLITUDE message group") + self.connection().subscribe("AMPLITUDE") + + if self._pickScript and self.connection(): + seiscomp.logging.info( + "Pick script defined: subscribing to PICK message group") + self.connection().subscribe("PICK") + + if self._newWhenFirstSeen: + seiscomp.logging.info( + "A new event is declared when I see it the first time") + + seiscomp.logging.info("Filtering:") + if " ".join(self._agencyIDs): + seiscomp.logging.info(" + agencyIDs filter for events and picks: %s" % (" ".join(self._agencyIDs))) + else: + seiscomp.logging.info(" + agencyIDs: no filter is applied") + + if " ".join(self._phaseHints): + seiscomp.logging.info(" + phase hint filter for picks: '%s'" % (" ".join(self._phaseHints))) + else: + seiscomp.logging.info(" + phase hints: no filter is applied") + + if " ".join(self._phaseStreams): + seiscomp.logging.info(" + phase stream ID filter for picks: '%s'" % (" ".join(self._phaseStreams))) + else: + seiscomp.logging.info(" + phase stream ID: no filter is applied") + + return True + + def run(self): + try: + try: + eventID = self.commandline().optionString("eventid") + event = self._cache.get(seiscomp.datamodel.Event, eventID) + if event: + self.notifyEvent(event) + except: + pass + + return seiscomp.client.Application.run(self) + except: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + return False + + + def runPickScript(self, pickObjectList): + if not self._pickScript: + return + + for pickObject in pickObjectList: + # parse values + try: + net = pickObject.waveformID().networkCode() + except: + net = "unknown" + try: + sta = pickObject.waveformID().stationCode() + except: + sta = "unknown" + pickID = pickObject.publicID() + try: + phaseHint = pickObject.phaseHint().code() + except: + phaseHint = "unknown" + + print(net, sta, pickID, phaseHint) + + if self._pickProc is not None: + if self._pickProc.poll() is None: + seiscomp.logging.info( + "Pick script still in progress -> wait one second") + 
self._pickProc.wait(1) + if self._pickProc.poll() is None: + seiscomp.logging.warning( + "Pick script still in progress -> skipping message") + return + try: + self._pickProc = subprocess.Popen( + [self._pickScript, net, sta, pickID, phaseHint]) + seiscomp.logging.info( + "Started pick script with pid %d" % self._pickProc.pid) + except: + seiscomp.logging.error( + "Failed to start pick script '%s'" % self._pickScript) + + def runAmpScript(self, ampObject): + if not self._ampScript: + return + + # parse values + net = ampObject.waveformID().networkCode() + sta = ampObject.waveformID().stationCode() + amp = ampObject.amplitude().value() + ampID = ampObject.publicID() + + if self._ampProc is not None: + if self._ampProc.poll() is None: + seiscomp.logging.warning( + "Amplitude script still in progress -> skipping message") + return + try: + self._ampProc = subprocess.Popen( + [self._ampScript, net, sta, "%.2f" % amp, ampID]) + seiscomp.logging.info( + "Started amplitude script with pid %d" % self._ampProc.pid) + except: + seiscomp.logging.error( + "Failed to start amplitude script '%s'" % self._ampScript) + + def runAlert(self, lat, lon): + if not self._alertScript: + return + + if self._alertProc is not None: + if self._alertProc.poll() is None: + seiscomp.logging.warning( + "AlertScript still in progress -> skipping message") + return + try: + self._alertProc = subprocess.Popen( + [self._alertScript, "%.1f" % lat, "%.1f" % lon]) + seiscomp.logging.info( + "Started alert script with pid %d" % self._alertProc.pid) + except: + seiscomp.logging.error( + "Failed to start alert script '%s'" % self._alertScript) + + def handleMessage(self, msg): + try: + dm = seiscomp.core.DataMessage.Cast(msg) + if dm: + for att in dm: + org = seiscomp.datamodel.Origin.Cast(att) + if org: + try: + if org.evaluationStatus() == seiscomp.datamodel.PRELIMINARY: + self.runAlert(org.latitude().value(), + org.longitude().value()) + except: + pass + + #ao = seiscomp.datamodel.ArtificialOriginMessage.Cast(msg) + # if ao: + # org = ao.origin() + # if org: + # self.runAlert(org.latitude().value(), org.longitude().value()) + # return + + seiscomp.client.Application.handleMessage(self, msg) + except: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + + def addObject(self, parentID, object): + try: + # pick + obj = seiscomp.datamodel.Pick.Cast(object) + if obj: + self._cache.feed(obj) + seiscomp.logging.debug("got new pick '%s'" % obj.publicID()) + agencyID = obj.creationInfo().agencyID() + phaseHint = obj.phaseHint().code() + if self._phaseStreams: + waveformID = "%s.%s.%s.%s" % ( + obj.waveformID().networkCode(), obj.waveformID().stationCode(), + obj.waveformID().locationCode(), obj.waveformID().channelCode()) + matched = False + for rule in self._phaseStreams: + if re.fullmatch(rule, waveformID): + matched = True + break + if not matched: + seiscomp.logging.debug( + " + stream ID %s does not match constraints.phaseStreams rules" + % (waveformID)) + return + + if not self._agencyIDs or agencyID in self._agencyIDs: + if not self._phaseHints or phaseHint in self._phaseHints: + self.notifyPick(obj) + else: + seiscomp.logging.debug(" + phase hint %s does not match '%s'" + % (phaseHint, self._phaseHints)) + else: + seiscomp.logging.debug(" + agencyID %s does not match '%s'" + % (agencyID, self._agencyIDs)) + return + + # amplitude + obj = seiscomp.datamodel.Amplitude.Cast(object) + if obj: + if obj.type() == self._ampType: + seiscomp.logging.debug("got new %s amplitude '%s'" % ( + 
self._ampType, obj.publicID())) + self.notifyAmplitude(obj) + return + + # origin + obj = seiscomp.datamodel.Origin.Cast(object) + if obj: + self._cache.feed(obj) + seiscomp.logging.debug("got new origin '%s'" % obj.publicID()) + + try: + if obj.evaluationStatus() == seiscomp.datamodel.PRELIMINARY: + self.runAlert(obj.latitude().value(), + obj.longitude().value()) + except: + pass + + return + + # magnitude + obj = seiscomp.datamodel.Magnitude.Cast(object) + if obj: + self._cache.feed(obj) + seiscomp.logging.debug( + "got new magnitude '%s'" % obj.publicID()) + return + + # event + obj = seiscomp.datamodel.Event.Cast(object) + if obj: + org = self._cache.get( + seiscomp.datamodel.Origin, obj.preferredOriginID()) + agencyID = org.creationInfo().agencyID() + seiscomp.logging.debug("got new event '%s'" % obj.publicID()) + if not self._agencyIDs or agencyID in self._agencyIDs: + self.notifyEvent(obj, True) + return + except: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + + def updateObject(self, parentID, object): + try: + obj = seiscomp.datamodel.Event.Cast(object) + if obj: + org = self._cache.get( + seiscomp.datamodel.Origin, obj.preferredOriginID()) + agencyID = org.creationInfo().agencyID() + seiscomp.logging.debug("update event '%s'" % obj.publicID()) + if not self._agencyIDs or agencyID in self._agencyIDs: + self.notifyEvent(obj, False) + except: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + + def handleTimeout(self): + self.checkEnoughPicks() + + def checkEnoughPicks(self): + if self._pickCache.size() >= self._phaseNumber: + # wait until self._phaseInterval has elapsed before calling the + # script (more picks might come) + timeWindowLength = (seiscomp.core.Time.GMT() - self._pickCache.oldest()).length() + if timeWindowLength >= self._phaseInterval: + picks = [seiscomp.datamodel.Pick.Cast(o) for o in self._pickCache] + self.runPickScript(picks) + self._pickCache.clear() + + def notifyPick(self, pick): + if self._phaseNumber <= 1: + self.runPickScript([pick]) + else: + self.checkEnoughPicks() + self._pickCache.feed(pick) + + def notifyAmplitude(self, amp): + self.runAmpScript(amp) + + def notifyEvent(self, evt, newEvent=True, dtmax=3600): + try: + org = self._cache.get( + seiscomp.datamodel.Origin, evt.preferredOriginID()) + if not org: + seiscomp.logging.warning( + "unable to get origin %s, ignoring event message" % evt.preferredOriginID()) + return + + preliminary = False + try: + if org.evaluationStatus() == seiscomp.datamodel.PRELIMINARY: + preliminary = True + except: + pass + + if preliminary == False: + nmag = self._cache.get( + seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID()) + if nmag: + mag = nmag.magnitude().value() + mag = "magnitude %.1f" % mag + else: + if len(evt.preferredMagnitudeID()) > 0: + seiscomp.logging.warning( + "unable to get magnitude %s, ignoring event message" % evt.preferredMagnitudeID()) + else: + seiscomp.logging.warning( + "no preferred magnitude yet, ignoring event message") + return + + # keep track of old events + if self._newWhenFirstSeen: + if evt.publicID() in self._oldEvents: + newEvent = False + else: + newEvent = True + self._oldEvents.append(evt.publicID()) + + dsc = seiscomp.seismology.Regions.getRegionName( + org.latitude().value(), org.longitude().value()) + + if self._eventDescriptionPattern: + try: + city, dist, azi = self.nearestCity(org.latitude().value(), org.longitude( + ).value(), self._citiesMaxDist, self._citiesMinPopulation) + if 
city: + dsc = self._eventDescriptionPattern + region = seiscomp.seismology.Regions.getRegionName( + org.latitude().value(), org.longitude().value()) + distStr = str(int(seiscomp.math.deg2km(dist))) + dsc = dsc.replace("@region@", region).replace( + "@dist@", distStr).replace("@poi@", city.name()) + except: + pass + + seiscomp.logging.debug("desc: %s" % dsc) + + dep = org.depth().value() + now = seiscomp.core.Time.GMT() + otm = org.time().value() + + dt = (now - otm).seconds() + + # if dt > dtmax: + # return + + if dt > 3600: + dt = "%d hours %d minutes ago" % (dt/3600, (dt % 3600)/60) + elif dt > 120: + dt = "%d minutes ago" % (dt/60) + else: + dt = "%d seconds ago" % dt + + if preliminary: + message = "earthquake, XXL, preliminary, %s, %s" % (dt, dsc) + else: + message = "earthquake, %s, %s, %s, depth %d kilometers" % ( + dt, dsc, mag, int(dep+0.5)) + seiscomp.logging.info(message) + + if not self._eventScript: + return + + if self._eventProc is not None: + if self._eventProc.poll() is None: + seiscomp.logging.warning( + "EventScript still in progress -> skipping message") + return + + try: + param2 = 0 + param3 = 0 + param4 = "" + if newEvent: + param2 = 1 + + org = self._cache.get( + seiscomp.datamodel.Origin, evt.preferredOriginID()) + if org: + try: + param3 = org.quality().associatedPhaseCount() + except: + pass + + nmag = self._cache.get( + seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID()) + if nmag: + param4 = "%.1f" % nmag.magnitude().value() + + self._eventProc = subprocess.Popen( + [self._eventScript, message, "%d" % param2, evt.publicID(), "%d" % param3, param4]) + seiscomp.logging.info( + "Started event script with pid %d" % self._eventProc.pid) + except: + seiscomp.logging.error("Failed to start event script '%s %s %d %d %s'" % ( + self._eventScript, message, param2, param3, param4)) + except: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + + def printUsage(self): + + print('''Usage: + scalert [options] + +Execute custom scripts upon arrival of objects or updates''') + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +Execute scalert on command line with debug output + scalert --debug +''') + +app = ObjectAlert(len(sys.argv), sys.argv) +sys.exit(app()) diff --git a/bin/scamp b/bin/scamp new file mode 100755 index 0000000..811e759 Binary files /dev/null and b/bin/scamp differ diff --git a/bin/scardac b/bin/scardac new file mode 100755 index 0000000..be14b6a Binary files /dev/null and b/bin/scardac differ diff --git a/bin/scart b/bin/scart new file mode 100755 index 0000000..da672f2 --- /dev/null +++ b/bin/scart @@ -0,0 +1,1363 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
# +############################################################################ + +from __future__ import print_function + +from getopt import gnu_getopt, GetoptError + +import bisect +import glob +import re +import time +import sys +import os + +import seiscomp.core +import seiscomp.client +import seiscomp.config +import seiscomp.io +import seiscomp.system + + +class Archive: + def __init__(self, archiveDirectory): + self.archiveDirectory = archiveDirectory + self.filePool = dict() + self.filePoolSize = 100 + + def iterators(self, begin, end, net, sta, loc, cha): + t = time.gmtime(begin.seconds()) + t_end = time.gmtime(end.seconds()) + + start_year = t[0] + + for year in range(start_year, t_end[0] + 1): + if year > start_year: + begin = seiscomp.core.Time.FromYearDay(year, 1) + t = time.gmtime(begin.seconds()) + + if net == "*": + netdir = self.archiveDirectory + str(year) + "/" + try: + files = os.listdir(netdir) + except BaseException: + sys.stderr.write( + "info: skipping year %i - not found in archive %s\n" + % (year, netdir) + ) + continue + + its = [] + for file in files: + if not os.path.isdir(netdir + file): + continue + tmp_its = self.iterators(begin, end, file, sta, loc, cha) + for it in tmp_its: + its.append(it) + + return its + + if sta == "*": + stadir = self.archiveDirectory + str(year) + "/" + net + "/" + + try: + files = os.listdir(stadir) + except BaseException: + sys.stderr.write( + "info: skipping network '%s' - not found in archive %s\n" + % (net, stadir) + ) + continue + + its = [] + for file in files: + if not os.path.isdir(stadir + file): + continue + tmp_its = self.iterators(begin, end, net, file, loc, cha) + for it in tmp_its: + its.append(it) + + return its + + # Check if cha contains a regular expression or not + mr = re.match("[A-Z|a-z|0-9]*", cha) + if (mr and mr.group() != cha) or cha == "*": + cha = cha.replace("?", ".") + stadir = self.archiveDirectory + str(year) + "/" + net + "/" + sta + "/" + try: + files = os.listdir(stadir) + except BaseException: + sys.stderr.write( + "info: skipping station %s - no data files " + "found in archive %s\n" % (sta, stadir) + ) + return [] + + its = [] + for file in files: + if not os.path.isdir(stadir + file): + sys.stderr.write( + "info: skipping data file '%s' - not found in archive %s\n" + % (file, stadir) + ) + continue + + part = file[:3] + if cha != "*": + mr = re.match(cha, part) + if not mr or mr.group() != part: + continue + + tmp_its = self.iterators(begin, end, net, sta, loc, part) + for it in tmp_its: + its.append(it) + + return its + + if loc == "*": + dir = ( + self.archiveDirectory + + str(year) + + "/" + + net + + "/" + + sta + + "/" + + cha + + ".D/" + ) + its = [] + + start_day = t[7] + if t_end[0] > year: + end_day = 366 + else: + end_day = t_end[7] + + files = glob.glob(dir + "*.%03d" % start_day) + + # Find first day with data + while not files and start_day <= end_day: + start_day += 1 + begin = seiscomp.core.Time.FromYearDay(year, start_day) + files = glob.glob(dir + "*.%03d" % start_day) + + if not files: + t = time.gmtime(begin.seconds() - 86400) + sys.stderr.write( + "info: skipping streams '%s.%s.*.%s on %s '" + "- no data found for this day in archive %s\n" + % (net, sta, cha, time.strftime("%Y-%m-%d", t), dir) + ) + + for file in files: + file = file.split("/")[-1] + if not os.path.isfile(dir + file): + sys.stderr.write( + "info: skipping data file '%s' - not found in archive %s\n" + % (file, dir) + ) + continue + + tmp_its = self.iterators( + begin, end, net, sta, file.split(".")[2], cha + ) 
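+                # gather the iterators found for this location code (data file
+                # names follow the NET.STA.LOC.CHA.D.YEAR.DAY convention)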
+ for it in tmp_its: + its.append(it) + + return its + + it = StreamIterator(self, begin, end, net, sta, loc, cha) + if it.record is not None: + return [it] + + return [] + + def location(self, rt, net, sta, loc, cha): + t = time.gmtime(rt.seconds()) + dir = str(t[0]) + "/" + net + "/" + sta + "/" + cha + ".D/" + file = ( + net + "." + sta + "." + loc + "." + cha + ".D." + str(t[0]) + ".%03d" % t[7] + ) + return dir, file + + def findIndex(self, begin, end, file): + rs = seiscomp.io.FileRecordStream() + rs.setRecordType("mseed") + if not rs.setSource(self.archiveDirectory + file): + return None, None + + ri = seiscomp.io.RecordInput(rs) + + index = None + retRec = None + + for rec in ri: + if rec is None: + break + + if rec.samplingFrequency() <= 0: + continue + + if rec.startTime() >= end: + break + if rec.endTime() < begin: + continue + + index = rs.tell() + retRec = rec + break + + rs.close() + + return retRec, index + + def readRecord(self, file, index): + try: + rs = self.filePool[file] + except BaseException: + rs = seiscomp.io.FileRecordStream() + rs.setRecordType("mseed") + if not rs.setSource(self.archiveDirectory + file): + return (None, None) + + rs.seek(index) + + # Remove old handles + if len(self.filePool) < self.filePoolSize: + # self.filePool.pop(self.fileList[-1]) + # print "Remove %s from filepool" % self.fileList[-1] + # del self.fileList[-1] + self.filePool[file] = rs + + ri = seiscomp.io.RecordInput( + rs, seiscomp.core.Array.INT, seiscomp.core.Record.SAVE_RAW + ) + # Read only valid records + while True: + rec = next(ri) + if rec is None: + break + if rec.samplingFrequency() <= 0: + continue + break + + index = rs.tell() + + if rec is None: + # Remove file handle from pool + rs.close() + try: + self.filePool.pop(file) + except BaseException: + pass + + return rec, index + + def stepTime(self, rt): + rt = rt + seiscomp.core.TimeSpan(86400) + t = rt.get() + rt.set(t[1], t[2], t[3], 0, 0, 0, 0) + return rt + + +class StreamIterator: + def __init__(self, ar, begin, end, net, sta, loc, cha): + self.archive = ar + + self.begin = begin + self.end = end + + self.net = net + self.sta = sta + self.loc = loc + self.cha = cha + + self.compareEndTime = False + + workdir, file = ar.location(begin, net, sta, loc, cha) + self.file = workdir + file + # print "Starting at file %s" % self.file + + self.record, self.index = ar.findIndex(begin, end, self.file) + if self.record: + self.current = self.record.startTime() + self.currentEnd = self.record.endTime() + + def next(self): + # needed for Python 2 only + return self.__next__() + + def __next__(self): + while True: + self.record, self.index = self.archive.readRecord(self.file, self.index) + if self.record: + self.current = self.record.startTime() + self.currentEnd = self.record.endTime() + if self.current >= self.end: + self.record = None + return self.record + else: + # Skip the current day file + self.current = self.archive.stepTime(self.current) + # Are we out of scope? 
+ if self.current >= self.end: + self.record = None + return self.record + + # Use the new file and start from the beginning + workdir, file = self.archive.location( + self.current, self.net, self.sta, self.loc, self.cha + ) + self.file = workdir + file + self.index = 0 + + def __cmp__(self, other): + if self.compareEndTime: + if self.currentEnd > other.currentEnd: + return 1 + elif self.currentEnd < other.currentEnd: + return -1 + return 0 + else: + if self.current > other.current: + return 1 + elif self.current < other.current: + return -1 + return 0 + + def __lt__(self, other): + if self.__cmp__(other) < 0: + return True + return False + + +class ArchiveIterator: + def __init__(self, ar, sortByEndTime): + self.archive = ar + self.streams = [] + self.sortByEndTime = sortByEndTime + + def append(self, beginTime, endTime, net, sta, loc, cha): + its = self.archive.iterators(beginTime, endTime, net, sta, loc, cha) + for it in its: + it.compareEndTime = self.sortByEndTime + bisect.insort(self.streams, it) + + def appendStation(self, beginTime, endTime, net, sta): + self.append(beginTime, endTime, net, sta, "*", "*") + + def nextSort(self): + if not self.streams: + return None + + stream = self.streams.pop(0) + + rec = stream.record + + next(stream) + + if stream.record is not None: + # Put the stream back on the right (sorted) position + bisect.insort(self.streams, stream) + + return rec + + +class Copy: + def __init__(self, archiveIterator): + self.archiveIterator = archiveIterator + + def __iter__(self): + for stream in self.archiveIterator.streams: + rec = stream.record + while rec: + yield rec + rec = next(stream) + + +class Sorter: + def __init__(self, archiveIterator): + self.archiveIterator = archiveIterator + + def __iter__(self): + while True: + rec = self.archiveIterator.nextSort() + if not rec: + return + yield rec + + +#################################################################### +## +# Application block +## +#################################################################### + + +def checkFile(fileName): + """ + Check the miniSEED records in a file, report unsorted records. + + Parameters + ---------- + fileName : miniSEED + Waveform file to check. + + Returns + ------- + false + If no error is found in file + error string + If file or records are corrupted + + """ + rs = seiscomp.io.FileRecordStream() + rs.setRecordType("mseed") + + if not rs.setSource(fileName): + return "cannot read file" + + ri = seiscomp.io.RecordInput(rs) + lastEnd = None + for rec in ri: + if rec is None: + continue + + sF = rec.samplingFrequency() + if sF <= 0: + continue + + if lastEnd and rec.endTime() <= lastEnd: + overlap = float(lastEnd - rec.endTime()) + + if overlap >= 1 / sF: + errorMsg = ( + "new record ends at or before end of last record: %s < %s" + % (rec.startTime(), lastEnd) + ) + return errorMsg + + lastEnd = rec.endTime() + + return False + + +def checkFilePrint(fileName, streamDict): + """ + Check the miniSEED records in a file, report NSLC along with parameters + + Parameters + ---------- + fileName : miniSEED + Waveform file to check. 
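+    streamDict : dict
+        Stream summary, updated in place: maps NET.STA.LOC.CHA to a tuple of
+        (start time, end time, record count, sample count, sampling rate).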
+ + Returns + ------- + false + If no error is found in file + error string + If file or records are corrupted + + """ + rs = seiscomp.io.FileRecordStream() + rs.setRecordType("mseed") + + if not rs.setSource(fileName): + return "cannot read file" + + ri = seiscomp.io.RecordInput(rs) + for rec in ri: + if rec is None: + continue + + stream = f"{rec.networkCode()}.{rec.stationCode()}.{rec.locationCode()}.{rec.channelCode()}" + recStart = rec.startTime() + recEnd = rec.endTime() + + if stream in streamDict: + streamStart = streamDict[stream][0] + streamEnd = streamDict[stream][1] + streamNRec = streamDict[stream][2] + streamNSamp = streamDict[stream][3] + if recStart.valid() and recStart.iso() < streamStart: + # update start time + streamDict.update( + { + stream: ( + recStart.iso(), + streamEnd, + streamNRec + 1, + streamNSamp + rec.data().size(), + rec.samplingFrequency() + ) + } + ) + if recEnd.valid() and recEnd.iso() > streamEnd: + # update end time + streamDict.update( + { + stream: ( + streamStart, + recEnd.iso(), + streamNRec + 1, + streamNSamp + rec.data().size(), + rec.samplingFrequency() + ) + } + ) + else: + # add stream for the first time + streamDict[stream] = ( + recStart.iso(), + recEnd.iso(), + 1, + rec.data().size(), + rec.samplingFrequency() + ) + + return True + + +def str2time(timestring): + """ + Liberally accept many time string formats and convert them to a + seiscomp.core.Time + """ + + timestring = timestring.strip() + for c in ["-", "/", ":", "T", "Z"]: + timestring = timestring.replace(c, " ") + timestring = timestring.split() + try: + assert 3 <= len(timestring) <= 6 + except AssertionError: + print( + "error: Provide a valid time format, e.g.: 'YYYY-MM-DD hh:mm:ss'", + file=sys.stderr, + ) + sys.exit(1) + + timestring.extend((6 - len(timestring)) * ["0"]) + timestring = " ".join(timestring) + format = "%Y %m %d %H %M %S" + if timestring.find(".") != -1: + format += ".%f" + + t = seiscomp.core.Time() + t.fromString(timestring, format) + return t + + +def time2str(time): + """ + Convert a seiscomp.core.Time to a string + """ + return time.toString("%Y-%m-%d %H:%M:%S.%2f") + + +def create_dir(dir): + if os.access(dir, os.W_OK): + return True + + try: + os.makedirs(dir) + return True + except BaseException: + return False + + +def isFile(url): + toks = url.split("://") + return len(toks) < 2 or toks[0] == "file" + + +def readStreamList(listFile): + """ + Read list of streams from file + + Parameters + ---------- + file : file + Input list file, one line per stream + format: NET.STA.LOC.CHA + + Returns + ------- + list + streams. 
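+        Each entry is a (network, station, location, channel) tuple of strings.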
+ + """ + streams = [] + + try: + if listFile == "-": + f = sys.stdin + listFile = "stdin" + else: + f = open(listFile, "r") + except Exception: + print("error: unable to open '{}'".format(listFile), file=sys.stderr) + return [] + + lineNumber = -1 + for line in f: + lineNumber = lineNumber + 1 + line = line.strip() + # ignore comments + if len(line) > 0 and line[0] == "#": + continue + + if len(line) == 0: + continue + + toks = line.split(".") + if len(toks) != 4: + f.close() + print( + "error: %s in line %d has invalid line format, expecting " + "NET.STA.LOC.CHA - 1 line per stream" % (listFile, lineNumber), + file=sys.stderr, + ) + return [] + + streams.append((toks[0], toks[1], toks[2], toks[3])) + + f.close() + + if len(streams) == 0: + return [] + + return streams + + +def readStreamTimeList(listFile): + """ + Read list of streams with time windows + + Parameters + ---------- + file : file + Input list file, one line per stream + format: 2007-03-28 15:48;2007-03-28 16:18;NET.STA.LOC.CHA + + Returns + ------- + list + streams. + + """ + streams = [] + + try: + if listFile == "-": + f = sys.stdin + listFile = "stdin" + else: + f = open(listFile, "r") + except BaseException: + sys.stderr.write("error: unable to open '{}'".format(listFile), file=sys.stderr) + return [] + + lineNumber = -1 + for line in f: + lineNumber = lineNumber + 1 + line = line.strip() + # ignore comments + if not line or line[0] == "#": + continue + + toks = line.split(";") + if len(toks) != 3: + f.close() + sys.stderr.write( + "%s:%d: error: invalid line format, expected 3 " + "items separated by ';'\n" % (listFile, lineNumber) + ) + return [] + + try: + tMin = str2time(toks[0].strip()) + except BaseException: + f.close() + sys.stderr.write( + "%s:%d: error: invalid time format (tmin)\n" % (listFile, lineNumber) + ) + return [] + + try: + tMax = str2time(toks[1].strip()) + except BaseException: + f.close() + sys.stderr.write( + "%s:%d: error: invalid time format (tMax)\n" % (listFile, lineNumber) + ) + return [] + + streamID = toks[2].strip() + toks = streamID.split(".") + if len(toks) != 4: + f.close() + sys.stderr.write( + "%s:%d: error: invalid stream format\n" % (listFile, lineNumber) + ) + return [] + + streams.append((tMin, tMax, toks[0], toks[1], toks[2], toks[3])) + + f.close() + + return streams + + +usage_info = """ +Usage: + scart [options] [archive] + +Import miniSEED waveforms or dump records from an SDS structure, sort them, +modify the time and replay them. Also check files and archives. + +Verbosity: + -h, --help Display this help message. + -v, --verbose Print verbose information. + +Mode: + --check arg Check mode: Check all files in the given directory structure + for erroneous miniSEED records. If no directory is given, + $SEISCOMP_ROOT/var/lib/archive is scanned. + -d, --dump Export (dump) mode. Read from SDS archive. + -I arg Import mode: Specify a recordstream URL when in import mode. + When using another recordstream than file a + stream list file is needed. + Default: file://- (stdin) + +Output: + -c arg Channel filter (regular expression). + Default: "(B|E|H|M|S)(D|H|L|N)(E|F|N|Z|1|2|3)" + -E Sort according to record end time; default is start time + --files arg Specify the file handles to cache; default: 100 + -l, --list arg Use a stream list file instead of defined networks and + channels (-n and -c are ignored). The list can be generated + from events by scevtstreams. 
One line per stream + Line format: starttime;endtime;streamID + 2007-03-28 15:48;2007-03-28 16:18;GE.LAST.*.* + 2007-03-28 15:48;2007-03-28 16:18;GE.PMBI..BH? + -m, --modify Modify the record time for realtime playback when dumping. + -n arg Network code list (comma separated). Default: * + --nslc arg Use a stream list file for filtering the data by the given + streams. For dump mode only! One line per stream. + Format: NET.STA.LOC.CHA + -s, --sort Sort records. + --speed arg Specify the speed to dump the records. A value of 0 means + no delay. Otherwise speed is a multiplier of the real time + difference between the records. + --stdout Writes to stdout if import mode is used instead + of creating a SDS archive. + --print-streams Print stream information only and exit. Works in import, dump and + check mode. Output: NET.STA.LOC.CHA StartTime EndTime. + -t t1~t2 Specify time window (as one properly quoted string) + times are of course UTC and separated by a tilde '~' . + --test Test only, no record output. + --with-filecheck Check all accessed files after import. Unsorted or + unreadable files are reported to stderr. + --with-filename Print all accessed files to stdout after import. + +Examples: +Read from /archive, create a miniSEED file where records are sorted by end time + scart -dsv -t '2007-03-28 15:48~2007-03-28 16:18' /archive > sorted.mseed + +Import miniSEED data from file [your file], create a SDS archive + scart -I file.mseed $SEISCOMP_ROOT/var/lib/archive + +Import miniSEED data into a SDS archive, check all modified files for errors + scart -I file.mseed --with-filecheck $SEISCOMP_ROOT/var/lib/archive + +Check an archive for files with out-of order records + scart --check /archive +""" + + +def usage(exitcode=0): + sys.stderr.write(usage_info) + sys.exit(exitcode) + + +try: + opts, files = gnu_getopt( + sys.argv[1:], + "I:dsmEn:c:t:l:hv", + [ + "stdout", + "with-filename", + "with-filecheck", + "dump", + "list=", + "nslc=", + "sort", + "modify", + "speed=", + "files=", + "verbose", + "test", + "help", + "check", + "print-streams", + ], + ) +except GetoptError: + usage(exitcode=1) + + +tmin = None +tmax = None +endtime = False +verbose = False +sort = False +modifyTime = False +dump = False +listFile = None +nslcFile = None +printStreams = False +withFilename = False # Whether to output accessed files for import or not +checkFiles = False # Check if output files are sorted by time +checkSDS = False # check the SDS archive for errors in files +test = False +filePoolSize = 100 +# default = stdin +recordURL = "file://-" + +speed = 0 +stdout = False + +channels = "(B|E|H|M|S)(D|H|L|N)(E|F|N|Z|1|2|3)" +networks = "*" + +archiveDirectory = "./" + + +for flag, arg in opts: + if flag == "-t": + try: + tmin, tmax = list(map(str2time, arg.split("~"))) + except ValueError as e: + print("error: {}".format(e), file=sys.stderr) + print( + " Provide correct time interval: -t 'startTime~endtime'", + file=sys.stderr, + ) + sys.exit(1) + + elif flag == "-E": + endtime = True + elif flag in ["-h", "--help"]: + usage(exitcode=0) + elif flag in ["--check"]: + checkSDS = True + elif flag in ["--stdout"]: + stdout = True + elif flag in ["--with-filename"]: + withFilename = True + elif flag in ["--with-filecheck"]: + checkFiles = True + elif flag in ["-v", "--verbose"]: + verbose = True + elif flag in ["-d", "--dump"]: + dump = True + elif flag in ["-l", "--list"]: + listFile = arg + elif flag in ["--nslc"]: + nslcFile = arg + elif flag in ["--print-streams"]: + printStreams = True + elif flag in 
["-s", "--sort"]: + sort = True + elif flag in ["-m", "--modify"]: + modifyTime = True + elif flag in ["--speed"]: + speed = float(arg) + elif flag in ["--files"]: + filePoolSize = int(arg) + elif flag in ["--test"]: + test = True + elif flag == "-I": + recordURL = arg + elif flag == "-n": + networks = arg + elif flag == "-c": + channels = arg + else: + usage(exitcode=1) + + +if files: + archiveDirectory = files[0] +else: + try: + archiveDirectory = os.environ["SEISCOMP_ROOT"] + "/var/lib/archive" + except BaseException: + pass + +try: + if archiveDirectory[-1] != "/": + archiveDirectory = archiveDirectory + "/" +except BaseException: + pass + +if not stdout and not os.path.isdir(archiveDirectory): + sys.stderr.write( + "info: archive directory '%s' not found - stopping\n" % archiveDirectory + ) + sys.exit(-1) + +archive = Archive(archiveDirectory) +archive.filePoolSize = filePoolSize + +if verbose: + seiscomp.logging.enableConsoleLogging(seiscomp.logging.getAll()) + + if dump and not listFile: + if not tmin or not tmax: + print( + "info: provide a time window with '-t' or '-l' when using " + "'-d' - stopping", + file=sys.stderr, + ) + sys.exit(-1) + if tmin >= tmax: + print( + "info: start time '{}' after end time '{}' - stopping".format( + time2str(tmin), time2str(tmax) + ), + file=sys.stderr, + ) + sys.exit(-1) + + sys.stderr.write("Time window: %s~%s\n" % (time2str(tmin), time2str(tmax))) + + sys.stderr.write("Archive: %s\n" % archiveDirectory) + if dump: + if not sort and not modifyTime: + sys.stderr.write("Mode: DUMP\n") + elif sort and not modifyTime: + sys.stderr.write("Mode: DUMP & SORT\n") + elif not sort and modifyTime: + sys.stderr.write("Mode: DUMP & MODIFY_TIME\n") + elif sort and modifyTime: + sys.stderr.write("Mode: DUMP & SORT & MODIFY_TIME\n") + else: + sys.stderr.write("Mode: IMPORT\n") + +archiveIterator = ArchiveIterator(archive, endtime) + +if checkSDS: + dump = False + stdout = False + +if dump: + stdout = True + +if stdout: + out = sys.stdout + try: + # needed in Python 3, fails in Python 2 + out = out.buffer + except AttributeError: + # assuming this is Python 2, nothing to be done + pass + +# list file witht times takes priority over nslc list +if listFile: + nslcFile = None + +streamDict = {} +if dump: + if listFile: + print("Stream file: '{}'".format(listFile), file=sys.stderr) + streams = readStreamTimeList(listFile) + for stream in streams: + if stream[0] >= stream[1]: + print( + "info: ignoring {}.{}.{}.{} - start {} after end {}".format( + stream[2], stream[3], stream[4], stream[5], stream[0], stream[1] + ), + file=sys.stderr, + ) + continue + + if verbose: + print( + "Adding stream to list: {}.{}.{}.{} {} - {}".format( + stream[2], stream[3], stream[4], stream[5], stream[0], stream[1] + ), + file=sys.stderr, + ) + archiveIterator.append( + stream[0], stream[1], stream[2], stream[3], stream[4], stream[5] + ) + + elif nslcFile: + print("Stream file: '{}'".format(nslcFile), file=sys.stderr) + streams = readStreamList(nslcFile) + for stream in streams: + if verbose: + print( + "Adding stream to list: {}.{}.{}.{} {} - {}".format( + stream[0], stream[1], stream[2], stream[3], tmin, tmax + ), + file=sys.stderr, + ) + archiveIterator.append( + tmin, tmax, stream[0], stream[1], stream[2], stream[3] + ) + + else: + if networks == "*": + archiveIterator.append(tmin, tmax, "*", "*", "*", channels) + else: + items = networks.split(",") + for n in items: + n = n.strip() + archiveIterator.append(tmin, tmax, n, "*", "*", channels) + + stime = None + realTime = 
seiscomp.core.Time.GMT() + + if sort: + records = Sorter(archiveIterator) + else: + records = Copy(archiveIterator) + + foundRecords = 0 + for rec in records: + # skip corrupt records + etime = seiscomp.core.Time(rec.endTime()) + + if stime is None: + stime = etime + if verbose: + sys.stderr.write("First record: %s\n" % stime.iso()) + + dt = etime - stime + + now = seiscomp.core.Time.GMT() + + if speed > 0: + playTime = (realTime + dt).toDouble() / speed + else: + playTime = now.toDouble() + + sleepTime = playTime - now.toDouble() + if sleepTime > 0: + time.sleep(sleepTime) + + if modifyTime: + recLength = etime - rec.startTime() + rec.setStartTime(seiscomp.core.Time(playTime) - recLength) + + if verbose: + etime = rec.endTime() + print( + "{} time current: {} start: {} end: {}".format( + rec.streamID(), + seiscomp.core.Time.LocalTime().iso(), + rec.startTime().iso(), + etime.iso(), + ), + file=sys.stderr, + ) + + if printStreams: + stream = f"{rec.networkCode()}.{rec.stationCode()}.{rec.locationCode()}.{rec.channelCode()}" + recStart = rec.startTime() + recEnd = rec.endTime() + + if stream in streamDict: + streamStart = streamDict[stream][0] + streamEnd = streamDict[stream][1] + streamNRec = streamDict[stream][2] + streamNSamp = streamDict[stream][3] + if recStart.valid() and recStart.iso() < streamStart: + # update start time + streamDict.update( + { + stream: ( + recStart.iso(), + streamEnd, + streamNRec + 1, + streamNSamp + rec.data().size(), + rec.samplingFrequency() + ) + } + ) + if recEnd.valid() and recEnd.iso() > streamEnd: + # update end time + streamDict.update( + { + stream: ( + streamStart, + recEnd.iso(), + streamNRec + 1, + streamNSamp + rec.data().size(), + rec.samplingFrequency() + ) + } + ) + else: + # add stream for the first time + streamDict[stream] = ( + recStart.iso(), + recEnd.iso(), + 1, + rec.data().size(), + rec.samplingFrequency() + ) + + if not test and not printStreams: + out.write(rec.raw().str()) + + foundRecords += 1 + + if verbose: + print("Found records: {}".format(foundRecords), file=sys.stderr) + + if test: + print("Test mode: no records written", file=sys.stderr) + +elif checkSDS: + foundIssues = 0 + checkedFiles = 0 + for path, subdirs, files in os.walk(archiveDirectory): + for name in files: + fileName = os.path.join(path, name) + checkedFiles += 1 + + if printStreams: + # only collect stream IDs + checkFilePrint(fileName, streamDict) + continue + + issueFound = checkFile(fileName) + if issueFound: + foundIssues += 1 + print("{} has an issue".format(fileName), file=sys.stderr) + print(" + " + issueFound, file=sys.stderr) + + if not printStreams: + print( + "Found issues in {}/{} files".format(foundIssues, checkedFiles), + file=sys.stderr, + ) + +else: + env = seiscomp.system.Environment.Instance() + cfg = seiscomp.config.Config() + env.initConfig(cfg, "scart") + try: + plugins = cfg.getStrings("plugins") + registry = seiscomp.system.PluginRegistry.Instance() + for p in plugins: + registry.addPluginName(p) + registry.loadPlugins() + except Exception: + pass + + rs = seiscomp.io.RecordStream.Open(recordURL) + if rs is None: + sys.stderr.write("Unable to open recordstream '%s'\n" % recordURL) + sys.exit(-1) + + if not rs.setRecordType("mseed"): + sys.stderr.write( + "Format 'mseed' is not supported by recordstream '%s'\n" % recordURL + ) + sys.exit(-1) + + if not isFile(recordURL): + if not listFile: + sys.stderr.write( + "A stream list is needed to fetch data from another source than a file\n" + ) + sys.exit(-1) + + streams = 
readStreamTimeList(listFile) + for stream in streams: + # Add stream to recordstream + if not rs.addStream( + stream[2], stream[3], stream[4], stream[5], stream[0], stream[1] + ): + if verbose: + sys.stderr.write( + "error: adding stream: %s %s %s.%s.%s.%s\n" + % ( + stream[0], + stream[1], + stream[2], + stream[3], + stream[4], + stream[5], + ) + ) + else: + if verbose: + sys.stderr.write( + "adding stream: %s %s %s.%s.%s.%s\n" + % ( + stream[0], + stream[1], + stream[2], + stream[3], + stream[4], + stream[5], + ) + ) + + input = seiscomp.io.RecordInput( + rs, seiscomp.core.Array.INT, seiscomp.core.Record.SAVE_RAW + ) + filePool = dict() + f = None + accessedFiles = set() + try: + for rec in input: + if printStreams: + stream = f"{rec.networkCode()}.{rec.stationCode()}.{rec.locationCode()}.{rec.channelCode()}" + recStart = rec.startTime() + recEnd = rec.endTime() + + if stream in streamDict: + streamStart = streamDict[stream][0] + streamEnd = streamDict[stream][1] + streamNRec = streamDict[stream][2] + streamNSamp = streamDict[stream][3] + if recStart.valid() and recStart.iso() < streamStart: + # update start time + streamDict.update( + { + stream: ( + recStart.iso(), + streamEnd, + streamNRec + 1, + streamNSamp + rec.data().size(), + rec.samplingFrequency() + ) + } + ) + if recEnd.valid() and recEnd.iso() > streamEnd: + # update end time + streamDict.update( + { + stream: ( + streamStart, + recEnd.iso(), + streamNRec + 1, + streamNSamp + rec.data().size(), + rec.samplingFrequency() + ) + } + ) + else: + # add stream for the first time + streamDict[stream] = ( + recStart.iso(), + recEnd.iso(), + 1, + rec.data().size(), + rec.samplingFrequency() + ) + + continue + + if stdout: + out.write(rec.raw().str()) + continue + + dir, file = archive.location( + rec.startTime(), + rec.networkCode(), + rec.stationCode(), + rec.locationCode(), + rec.channelCode(), + ) + file = dir + file + + if not test: + try: + f = filePool[file] + except BaseException: + outdir = "/".join((archiveDirectory + file).split("/")[:-1]) + if not create_dir(outdir): + sys.stderr.write("Could not create directory '%s'\n" % outdir) + sys.exit(-1) + + try: + f = open(archiveDirectory + file, "ab") + except BaseException: + sys.stderr.write( + "File '%s' could not be opened for writing\n" + % (archiveDirectory + file) + ) + sys.exit(-1) + + # Remove old handles + if len(filePool) < filePoolSize: + filePool[file] = f + + if withFilename or checkFiles: + accessedFiles.add(archiveDirectory + file) + f.write(rec.raw().str()) + else: + if withFilename or checkFiles: + accessedFiles.add(archiveDirectory + file) + + if verbose: + sys.stderr.write( + "%s %s %s\n" % (rec.streamID(), rec.startTime().iso(), file) + ) + except Exception as e: + sys.stderr.write("Exception: %s\n" % str(e)) + + if checkFiles: + print("Testing accessed files (may take some time):", file=sys.stderr) + foundIssues = 0 + checkedFiles = 0 + for fileName in accessedFiles: + checkedFiles += 1 + issueFound = checkFile(fileName) + if issueFound: + foundIssues += 1 + print("{} has an issue".format(fileName), file=sys.stderr) + print(" + " + issueFound, file=sys.stderr) + + print( + "Found issues in {}/{} files".format(foundIssues, checkedFiles), + file=sys.stderr, + ) + + if withFilename: + if verbose: + print("List of accessed files:", file=sys.stderr) + for fileName in accessedFiles: + print(fileName, file=sys.stdout) + +if len(streamDict) > 0: + print( + "# streamID start end records samples samplingRate", + file=sys.stdout, + ) + for key, (start, end, nRecs, 
nSamples, sps) in sorted(streamDict.items()): + print( + f"{key: <{16}} {start: <{27}} {end: <{27}} {nRecs} {nSamples} {sps}", + file=sys.stdout, + ) diff --git a/bin/scautoloc b/bin/scautoloc new file mode 100755 index 0000000..14f796c Binary files /dev/null and b/bin/scautoloc differ diff --git a/bin/scautopick b/bin/scautopick new file mode 100755 index 0000000..8a1a977 Binary files /dev/null and b/bin/scautopick differ diff --git a/bin/scbulletin b/bin/scbulletin new file mode 100755 index 0000000..e343a81 --- /dev/null +++ b/bin/scbulletin @@ -0,0 +1,19 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. # +############################################################################ + +import seiscomp.scbulletin + +if __name__ == "__main__": + seiscomp.scbulletin.main() diff --git a/bin/scchkcfg b/bin/scchkcfg new file mode 100755 index 0000000..62339a1 Binary files /dev/null and b/bin/scchkcfg differ diff --git a/bin/sccnv b/bin/sccnv new file mode 100755 index 0000000..a7fb8ab Binary files /dev/null and b/bin/sccnv differ diff --git a/bin/scconfig b/bin/scconfig new file mode 100755 index 0000000..de32e7f Binary files /dev/null and b/bin/scconfig differ diff --git a/bin/scdb b/bin/scdb new file mode 100755 index 0000000..b73a533 Binary files /dev/null and b/bin/scdb differ diff --git a/bin/scdbstrip b/bin/scdbstrip new file mode 100755 index 0000000..133248b --- /dev/null +++ b/bin/scdbstrip @@ -0,0 +1,1042 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
# +############################################################################ + +from __future__ import absolute_import, division, print_function + +import re +import sys +import traceback +import seiscomp.core +import seiscomp.client +import seiscomp.logging +import seiscomp.utils + +output = sys.stdout +error = sys.stderr + + +class RuntimeException(Exception): + def __init__(self, what): + self.what = what + + def __str__(self): + return str(self.what) + + +class ExitRequestException(RuntimeException): + def __init__(self): + pass + + def __str__(self): + return "exit requested" + + +class QueryInterface: + def __init__(self, database): + self._database = database + + def cnvCol(self, col): + return self._database.convertColumnName(col) + + def getTables(self): + return [] + + def deleteObjectQuery(self, *v): + return "" + + def deleteJournalQuery(self, *v): + return "" + + def childQuery(self, mode, *v): + return "" + + def childJournalQuery(self, mode, *v): + return "" + + +class MySQLDB(QueryInterface): + def __init__(self, database): + QueryInterface.__init__(self, database) + + def getTables(self): + tmp_tables = [] + if not self._database.beginQuery("show tables"): + return tmp_tables + + while self._database.fetchRow(): + tmp_tables.append(self._database.getRowFieldString(0)) + + self._database.endQuery() + return tmp_tables + + def deleteObjectQuery(self, *v): + if v[0]: + q = "delete " + v[0] + " from " + ", ".join(v) + " where " + \ + v[0] + "._oid=" + v[1] + "._oid and " + else: + q = "delete " + v[1] + " from " + ", ".join(v[1:]) + " where " + + for i in range(1, len(v)-1): + if i > 1: + q += " and " + q += v[i] + "._oid=" + v[i+1] + "._oid" + + return q + + def deleteJournalQuery(self, *v): + q = "delete JournalEntry from JournalEntry, " + ", ".join(v) + " where " + \ + v[0] + "._oid=" + v[1] + "._oid" + + for i in range(1, len(v)-1): + q += " and " + v[i] + "._oid=" + v[i+1] + "._oid" + + q += " and JournalEntry.objectID=PublicObject.publicID" + + return q + + def childQuery(self, mode, *v): + if v[0]: + if mode == "delete": + q = "delete " + v[0] + elif mode == "count": + q = "select count(*)" + else: + return "" + + q += " from " + ", ".join(v) + " where " + \ + v[0] + "._oid=" + v[1] + "._oid and " + else: + if mode == "delete": + q = "delete " + v[1] + elif mode == "count": + q = "select count(*)" + else: + return "" + + q += " from " + ", ".join(v[1:]) + " where " + + for i in range(1, len(v)-1): + if i > 1: + q += " and " + q += v[i] + "._parent_oid=" + v[i+1] + "._oid" + + return q + + def childJournalQuery(self, mode, *v): + if v[0]: + if mode == "delete": + q = "delete JournalEntry" + elif mode == "count": + q = "select count(*)" + else: + return "" + + q += " from JournalEntry, " + ", ".join(v) + " where " + \ + v[0] + "._oid=" + v[1] + "._oid and " + else: + if mode == "delete": + q = "delete " + v[1] + elif mode == "count": + q = "select count(*)" + else: + return "" + + q += " from JournalEntry, " + ", ".join(v[1:]) + " where " + + for i in range(1, len(v)-1): + if i > 1: + q += " and " + q += v[i] + "._parent_oid=" + v[i+1] + "._oid" + + q += " and JournalEntry.objectID=PublicObject.publicID" + return q + + +class PostgresDB(QueryInterface): + def __init__(self, database): + QueryInterface.__init__(self, database) + + def getTables(self): + tmp_tables = [] + if not self._database.beginQuery("SELECT table_name FROM information_schema.tables WHERE table_type = 'BASE TABLE' AND table_schema NOT IN ('pg_catalog', 'information_schema');"): + return tmp_tables + 
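+        # A failed beginQuery() yields an empty list, which callers treat as
+        # "no tables found". Otherwise the result is fetched row by row; each
+        # row carries a single table name in column 0.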
+ while self._database.fetchRow(): + tmp_tables.append(self._database.getRowFieldString(0)) + + self._database.endQuery() + return tmp_tables + + def deleteObjectQuery(self, *v): + if v[0]: + q = "delete from " + v[0] + " using " + ", ".join(v[1:]) + " where " + \ + v[0] + "._oid=" + v[1] + "._oid and " + else: + q = "delete from " + v[1] + " using " + \ + ", ".join(v[2:]) + " where " + + for i in range(1, len(v)-1): + if i > 1: + q += " and " + q += v[i] + "._oid=" + v[i+1] + "._oid" + + return q + + def deleteJournalQuery(self, *v): + q = "delete from JournalEntry using " + ", ".join(v) + " where " + \ + v[0] + "._oid=" + v[1] + "._oid" + + for i in range(1, len(v)-1): + q += " and " + v[i] + "._oid=" + v[i+1] + "._oid" + + q += " and JournalEntry." + \ + self.cnvCol("objectID") + "=PublicObject." + \ + self.cnvCol("publicID") + + return q + + def childQuery(self, mode, *v): + if v[0]: + if mode == "delete": + q = "delete from " + v[0] + " using " + ", ".join(v[1:]) + elif mode == "count": + q = "select count(*) from " + ", ".join(v) + else: + return "" + + q += " where " + \ + v[0] + "._oid=" + v[1] + "._oid and " + else: + if mode == "delete": + q = "delete from " + v[1] + " using " + ", ".join(v[2:]) + elif mode == "count": + q = "select count(*) from " + ", ".join(v[1:]) + else: + return "" + + q += " where " + + for i in range(1, len(v)-1): + if i > 1: + q += " and " + q += v[i] + "._parent_oid=" + v[i+1] + "._oid" + + return q + + def childJournalQuery(self, mode, *v): + if v[0]: + if mode == "delete": + q = "delete from JournalEntry using " + elif mode == "count": + q = "select count(*) from " + else: + return "" + + q += ", ".join(v) + " where " + \ + v[0] + "._oid=" + v[1] + "._oid and " + else: + if mode == "delete": + q = "delete from " + v[1] + " using " + elif mode == "count": + q = "select count(*) from " + else: + return "" + + q += " JournalEntry, " + ", ".join(v[1:]) + " where " + + for i in range(1, len(v)-1): + if i > 1: + q += " and " + q += v[i] + "._parent_oid=" + v[i+1] + "._oid" + + q += " and JournalEntry." + \ + self.cnvCol("objectID") + "=PublicObject." + \ + self.cnvCol("publicID") + return q + + +class DBCleaner(seiscomp.client.Application): + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + + self.setMessagingEnabled(False) + self.setDatabaseEnabled(True, True) + self.setDaemonEnabled(False) + + self._daysToKeep = None + self._hoursToKeep = None + self._minutesToKeep = None + self._datetime = None + self._invertMode = False + self._stripEP = True + self._stripQC = True + + self._steps = 0 + self._currentStep = 0 + self._keepEvents = [] + + self._timer = seiscomp.utils.StopWatch() + + def createCommandLineDescription(self): + try: + try: + self.commandline().addGroup("Mode") + self.commandline().addOption("Mode", "check", "Checks if " + "unreachable objects exist.") + self.commandline().addOption("Mode", "clean-unused", + "Remove all unreachable objects " + "when in checkmode. Default: off.") + + self.commandline().addGroup("Objects") + + self.commandline().addOption("Objects", "ep-only,E", + "Strip only event parameters" + " but no waveform QC.") + self.commandline().addStringOption("Objects", "keep-events", + "Event-IDs to keep in the " + "database. Combining with" + "'qc-only' is invalld.") + self.commandline().addOption("Objects", "qc-only,Q", + "Strip only waveform QC but no " + "event parameters. 
Combining with" + "'ep-only' is invalld.") + + self.commandline().addGroup("Timespan") + self.commandline().addStringOption("Timespan", "datetime", + "Specify the datetime (UTC)" + " from which to keep all " + "events. If given, days, " + "minutes and hours are ignored. " + "Format: '%Y-%m-%d %H:%M:%S'.") + self.commandline().addIntOption("Timespan", "days", + "The number of days to keep. " + "Added to hours and minutes. " + "Default is 30 if no other " + "times are given.") + self.commandline().addIntOption("Timespan", "hours", + "The number of hours to keep. " + "Added to days and minutes.") + self.commandline().addIntOption("Timespan", "minutes", + "The number of minutes to keep. " + "Added to days and hours.") + self.commandline().addOption("Timespan", "invert,i", + "Delete all parameters after the " + "specified time period. If not " + "given, parameter from before are" + " deleted.") + + except RuntimeError: + seiscomp.logging.warning( + "caught unexpected error %s" % sys.exc_info()) + return True + except RuntimeError: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + sys.exit(-1) + + def initConfiguration(self): + try: + if not seiscomp.client.Application.initConfiguration(self): + return False + try: + self._invertMode = self.configGetBool( + "database.cleanup.invertMode") + except RuntimeError: + pass + + try: + if self.configGetBool("database.cleanup.eventParameters"): + self._stripEP = True + else: + self._stripEP = False + except RuntimeError: + pass + + try: + if self.configGetBool("database.cleanup.qualityControl"): + self._stripQC = True + else: + self._stripQC = False + except RuntimeError: + pass + + try: + self._daysToKeep = self.configGetInt( + "database.cleanup.keep.days") + except RuntimeError: + pass + + try: + self._hoursToKeep = self.configGetInt( + "database.cleanup.keep.hours") + except RuntimeError: + pass + + try: + self._minutesToKeep = self.configGetInt( + "database.cleanup.keep.minutes") + except RuntimeError: + pass + + return True + + except RuntimeError: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + sys.exit(-1) + + + def printUsage(self): + + print('''Usage: + scbstrip [options] + +Remove event and waveform quality parameters from the database in a timespan.''') + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +Remove all event and waveform QC paramters older than 30 days + scdbstrip -d mysql://sysop:sysop@localhost/seiscomp --days 30 +''') + + def validateParameters(self): + if not seiscomp.client.Application.validateParameters(self): + return False + + try: + try: + self._daysToKeep = self.commandline().optionInt("days") + except RuntimeError: + pass + + try: + self._hoursToKeep = self.commandline().optionInt("hours") + except RuntimeError: + pass + + try: + self._minutesToKeep = self.commandline().optionInt("minutes") + except RuntimeError: + pass + + if self.commandline().hasOption("invert"): + self._invertMode = True + + epOnly = False + if self.commandline().hasOption("ep-only"): + self._stripEP = True + self._stripQC = False + epOnly = True + + if self.commandline().hasOption("qc-only"): + if epOnly: + error.write("ERROR: Option '--qc-only' conflicts with " + "'--ep-only'\n") + return False + else: + self._stripEP = False + self._stripQC = True + + if not self._stripEP and not self._stripQC: + error.write("[INFO] Event and QC parameters are disregarded by" + " configuration\n") + return False + try: + eventIDs = 
self.commandline().optionString("keep-events") + self._keepEvents = [id.strip() for id in eventIDs.split(',')] + except RuntimeError: + pass + + try: + dateTime = self.commandline().optionString("datetime") + except RuntimeError: + dateTime = None + + if dateTime: + self._daysToKeep = None + self._hoursToKeep = None + self._minutesToKeep = None + + date = seiscomp.core.Time() + try: + if date.fromString(dateTime, "%Y-%m-%d %H:%M:%S"): + error.write("Using datetime option: %s\n" % + date.toString("%Y-%m-%d %H:%M:%S")) + self._datetime = date + else: + error.write("ERROR: datetime has wrong format\n") + return False + except ValueError: + pass + + # fall back to default if no times are given + if (self._daysToKeep is None and dateTime is None and + self._hoursToKeep is None and self._minutesToKeep is None): + self._daysToKeep = 30 + + return True + + except RuntimeError: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + sys.exit(-1) + + def run(self): + classname = self.database().className() + if re.search('postgres', classname, re.IGNORECASE): + self._query = PostgresDB(self.database()) + elif re.search('mysql', classname, re.IGNORECASE): + self._query = MySQLDB(self.database()) + else: + output.write( + "Error: Database interface %s is not supported\n" % (classname)) + output.flush() + return False + + try: + self._timer.restart() + + if self.commandline().hasOption("check"): + return self.check() + + return self.clean() + except RuntimeError: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + sys.exit(-1) + + def checkTable(self, table): + self.runCommand( + "update tmp_object set used=1 where _oid in (select _oid from %s)" + % table) + + def check(self): + try: + if self._datetime is None: + timeSpan = seiscomp.core.TimeSpan(0) + if self._daysToKeep: + timeSpan += seiscomp.core.TimeSpan(self._daysToKeep*24*3600) + + if self._hoursToKeep: + timeSpan += seiscomp.core.TimeSpan(self._hoursToKeep*3600) + + if self._minutesToKeep: + timeSpan += seiscomp.core.TimeSpan(self._minutesToKeep*60) + + # All times are given in localtime + timestamp = seiscomp.core.Time.LocalTime() - timeSpan + else: + timestamp = self._datetime + + output.write("[INFO] Check objects older than %s\n" % + timestamp.toString("%Y-%m-%d %H:%M:%S")) + + tables = self._query.getTables() + if len(tables) == 0: + return False + + if "Object" in tables: + tables.remove("Object") + if "object" in tables: + tables.remove("object") + if "PublicObject" in tables: + tables.remove("PublicObject") + if "publicobject" in tables: + tables.remove("publicobject") + if "Meta" in tables: + tables.remove("Meta") + if "meta" in tables: + tables.remove("meta") + + self._steps = len(tables) + 1 + + if self.commandline().hasOption("clean-unused"): + self._steps = self._steps + 1 + + # Skip the first 5 objects id' that are reserved for metaobjects + # (Config, QualityControl, inventory, EventParameters, routing) + tmp_object = "\ + create temporary table tmp_object as \ + select _oid, 0 as used from Object where _oid > 5 and _timestamp < '%s'\ + " % timestamp.toString("%Y-%m-%d %H:%M:%S") + + self.beginMessage("Search objects") + if not self.runCommand(tmp_object): + return False + self.endMessage(self.globalCount("tmp_object")) + + for table in tables: + self.beginMessage("Check table %s" % table) + self.checkTable(table) + self.endMessage(self.usedCount("tmp_object")) + + unusedObjects = self.unusedCount("tmp_object") + + if 
self.commandline().hasOption("clean-unused"): + self.delete("Remove unreachable objects", + self.deleteUnusedRawObjects, "tmp_object") + + self.beginMessage("%d unused objects found" % unusedObjects) + if not self.runCommand("drop table tmp_object"): + return False + self.endMessage() + + return True + + except RuntimeException as e: + error.write("\nException: %s\n" % str(e)) + return False + + except: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + sys.exit(-1) + + def clean(self): + try: + if self._datetime is None: + timeSpan = seiscomp.core.TimeSpan(0) + if self._daysToKeep: + timeSpan += seiscomp.core.TimeSpan(self._daysToKeep*24*3600) + + if self._hoursToKeep: + timeSpan += seiscomp.core.TimeSpan(self._hoursToKeep*3600) + + if self._minutesToKeep: + timeSpan += seiscomp.core.TimeSpan(self._minutesToKeep*60) + + # All times are given in GMT (UTC) + timestamp = seiscomp.core.Time.GMT() - timeSpan + else: + timestamp = self._datetime + + if not self._invertMode: + output.write("[INFO] Keep objects after %s UTC\n" % + timestamp.toString("%Y-%m-%d %H:%M:%S")) + else: + output.write("[INFO] Keep objects before %s UTC\n" % + timestamp.toString("%Y-%m-%d %H:%M:%S")) + + if len(self._keepEvents) > 0: + output.write("[INFO] Keep events in db: %s\n" % + ",".join(self._keepEvents)) + + op = '<' + if self._invertMode: + op = '>=' + + self._steps = 32 + + # treat QC entries + if self._stripQC: + self.beginMessage("Deleting waveform quality parameters") + if not self.runCommand( + self._query.deleteObjectQuery("Object", "WaveformQuality") + + "WaveformQuality.%s %s '%s'" % + (self.cnvCol("end"), op, + timestamp.toString("%Y-%m-%d %H:%M:%S"))): + return False + if not self.runCommand("delete from WaveformQuality where WaveformQuality.%s %s '%s'" % (self.cnvCol("end"), op, timestamp.toString("%Y-%m-%d %H:%M:%S"))): + return False + self.endMessage() + + if not self._stripEP: + return True + + # treat event parameters + old_events = "\ + create temporary table old_events as \ + select Event._oid, PEvent.%s \ + from Event, PublicObject as PEvent, Origin, PublicObject as POrigin \ + where Event._oid=PEvent._oid and \ + Origin._oid=POrigin._oid and \ + Event.%s=POrigin.%s and \ + Origin.%s %s '%s'\ + " % (self.cnvCol("publicID"), self.cnvCol("preferredOriginID"), self.cnvCol("publicID"), self.cnvCol("time_value"), op, timestamp.toString("%Y-%m-%d %H:%M:%S")) + + if len(self._keepEvents) > 0: + old_events += " and PEvent." 
+ \ + self.cnvCol("publicID") + \ + " not in ('%s')" % "','".join(self._keepEvents) + + self.beginMessage("Find old events") + if not self.runCommand(old_events): + return False + self.endMessage(self.globalCount("old_events")) + + # Delete OriginReferences of old events + self.delete("Delete origin references of old events", + self.deleteChilds, "OriginReference", "old_events") + + # Delete FocalMechanismReference of old events + self.delete("Delete focal mechanism references of old events", + self.deleteChilds, "FocalMechanismReference", + "old_events") + + # Delete EventDescription of old events + self.delete("Delete event descriptions of old events", + self.deleteChilds, "EventDescription", "old_events") + + # Delete Comments of old events + self.delete("Delete comments of old events", + self.deleteChilds, "Comment", "old_events") + + # Delete old events + self.delete("Delete old events", self.deleteObjects, + "Event", "old_events") + + self.beginMessage("Cleaning up temporary results") + if not self.runCommand("drop table old_events"): + return False + self.endMessage() + + tmp_fm = "\ + create temporary table tmp_fm as \ + select FocalMechanism._oid, PFM.%s, 0 as used \ + from PublicObject as PFM, FocalMechanism \ + where PFM._oid=FocalMechanism._oid\ + " % (self.cnvCol("publicID")) + + self.beginMessage("Find unassociated focal mechanisms") + + if not self.runCommand(tmp_fm): + return False + + tmp_fm = "\ + update tmp_fm set used=1 \ + where " + self.cnvCol("publicID") + " in (select distinct " + self.cnvCol("focalMechanismID") + " from FocalMechanismReference) \ + " + + if not self.runCommand(tmp_fm): + return False + + self.endMessage(self.unusedCount("tmp_fm")) + + # Delete Comments of unassociated focal mechanisms + self.delete("Delete comments of unassociation focal mechanisms", + self.deleteUnusedChilds, "Comment", "tmp_fm") + + # Delete MomentTensor.Comments of unassociated focal mechanisms + self.delete("Delete moment tensor comments of unassociated focal mechanisms", + self.deleteUnusedChilds, "Comment", "MomentTensor", + "tmp_fm") + + # Delete MomentTensor.DataUsed of unassociated focal mechanisms + self.delete("Delete moment tensor data of unassociated focal mechanisms", + self.deleteUnusedChilds, "DataUsed", "MomentTensor", + "tmp_fm") + + # Delete MomentTensor.PhaseSetting of unassociated focal mechanisms + self.delete("Delete moment tensor phase settings of unassociated focal mechanisms", + self.deleteUnusedChilds, "MomentTensorPhaseSetting", + "MomentTensor", "tmp_fm") + + # Delete MomentTensor.StationContribution.ComponentContribution of unassociated focal mechanisms + self.delete("Delete moment tensor component contributions of unassociated focal mechanisms", + self.deleteUnusedChilds, + "MomentTensorComponentContribution", + "MomentTensorStationContribution", "MomentTensor", + "tmp_fm") + + # Delete MomentTensor.StationContributions of unassociated focal mechanisms + self.delete("Delete moment tensor station contributions of unassociated focal mechanisms", + self.deleteUnusedPublicChilds, + "MomentTensorStationContribution", "MomentTensor", + "tmp_fm") + + # Delete MomentTensors of unassociated focal mechanisms + self.delete("Delete moment tensors of unassociated focal mechanisms", + self.deleteUnusedPublicChilds, "MomentTensor", + "tmp_fm") + + # Delete FocalMechanism itself + self.delete("Delete unassociated focal mechanisms", + self.deleteUnusedObjects, "FocalMechanism", "tmp_fm") + + self.beginMessage("Cleaning up temporary results") + if not 
self.runCommand("drop table tmp_fm"): + return False + self.endMessage() + + tmp_origin = "\ + create temporary table tmp_origin as \ + select Origin._oid, POrigin.%s, 0 as used \ + from PublicObject as POrigin, Origin \ + where POrigin._oid=Origin._oid and \ + Origin.%s %s '%s'\ + " % (self.cnvCol("publicID"), self.cnvCol("time_value"), op, timestamp.toString("%Y-%m-%d %H:%M:%S")) + + self.beginMessage("Find unassociated origins") + + if not self.runCommand(tmp_origin): + return False + + tmp_origin = "\ + update tmp_origin set used=1 \ + where (" + self.cnvCol("publicID") + " in (select distinct " + self.cnvCol("originID") + " from OriginReference)) \ + or (" + self.cnvCol("publicID") + " in (select " + self.cnvCol("derivedOriginID") + " from MomentTensor))" + + if not self.runCommand(tmp_origin): + return False + + self.endMessage(self.unusedCount("tmp_origin")) + + # Delete Arrivals of unassociated origins + self.delete("Delete unassociated arrivals", + self.deleteUnusedChilds, "Arrival", "tmp_origin") + + # Delete StationMagnitudes of unassociated origins + self.delete("Delete unassociated station magnitudes", + self.deleteUnusedPublicChilds, "StationMagnitude", + "tmp_origin") + + # Delete StationMagnitudeContributions of unassociated origins + self.delete("Delete unassociated station magnitude contributions", + self.deleteUnusedChilds, + "StationMagnitudeContribution", "Magnitude", + "tmp_origin") + + # Delete Magnitudes of unassociated origins + self.delete("Delete unassociated magnitudes", + self.deleteUnusedPublicChilds, "Magnitude", + "tmp_origin") + + # Delete Comments of unassociated origins + self.delete("Delete comments of unassociation origins", + self.deleteUnusedChilds, "Comment", "tmp_origin") + + # Delete CompositeTimes of unassociated origins + self.delete("Delete composite times of unassociation origins", + self.deleteUnusedChilds, "CompositeTime", "tmp_origin") + + # Delete Origins itself + self.delete("Delete unassociated origins", + self.deleteUnusedObjects, "Origin", "tmp_origin") + + self.beginMessage("Cleaning up temporary results") + if not self.runCommand("drop table tmp_origin"): + return False + self.endMessage() + + # Delete all unassociated picks (via arrivals) + + self.beginMessage("Find unassociated picks") + + tmp_pick = "\ + create temporary table tmp_pick as \ + select Pick._oid, PPick.%s, 0 as used \ + from PublicObject as PPick, Pick \ + where PPick._oid=Pick._oid and \ + Pick.%s %s '%s' \ + " % (self.cnvCol("publicID"), self.cnvCol("time_value"), op, timestamp.toString("%Y-%m-%d %H:%M:%S")) + + if not self.runCommand(tmp_pick): + return False + + tmp_pick = "\ + update tmp_pick set used=1 \ + where " + self.cnvCol("publicID") + " in \ + (select distinct " + self.cnvCol("pickID") + " from Arrival) \ + " + + if not self.runCommand(tmp_pick): + return False + + self.endMessage(self.unusedCount("tmp_pick")) + + self.delete("Delete unassociated picks", + self.deleteUnusedObjects, "Pick", "tmp_pick") + + self.beginMessage("Cleaning up temporary results") + if not self.runCommand("drop table tmp_pick"): + return False + self.endMessage() + + # Delete all unassociated amplitudes (via stationmagnitudes) + + self.beginMessage("Find unassociated amplitudes") + + tmp_amp = "\ + create temporary table tmp_amp as \ + select Amplitude._oid, PAmplitude.%s, 0 as used \ + from PublicObject as PAmplitude, Amplitude \ + where PAmplitude._oid=Amplitude._oid and \ + Amplitude.%s %s '%s' \ + " % (self.cnvCol("publicID"), self.cnvCol("timeWindow_reference"), op, 
timestamp.toString("%Y-%m-%d %H:%M:%S")) + + if not self.runCommand(tmp_amp): + return False + + tmp_amp = "\ + update tmp_amp set used=1 \ + where " + self.cnvCol("publicID") + " in \ + (select distinct " + self.cnvCol("amplitudeID") + " from StationMagnitude) \ + " + + if not self.runCommand(tmp_amp): + return False + + self.endMessage(self.unusedCount("tmp_amp")) + + self.delete("Delete unassociated station amplitudes", + self.deleteUnusedObjects, "Amplitude", "tmp_amp") + + self.beginMessage("Cleaning up temporary results") + if not self.runCommand("drop table tmp_amp"): + return False + self.endMessage() + + return True + + except RuntimeException as e: + error.write("\nException: %s\n" % str(e)) + return False + + except: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + sys.exit(-1) + + def cnvCol(self, col): + return self.database().convertColumnName(col) + + def beginMessage(self, msg): + output.write("[%3d%%] " % (self._currentStep*100/self._steps)) + output.write(msg + "...") + output.flush() + self._currentStep = self._currentStep + 1 + + def endMessage(self, count=None): + if not count is None: + output.write("done (%d)" % count) + else: + output.write("done") + + span = self._timer.elapsed().seconds() + output.write(", time spent: %d %02d:%02d:%02d\n" % ( + span / 86400, (span % 86400) / 3600, (span % 3600) / 60, span % 60)) + + def runCommand(self, q): + if self.isExitRequested(): + raise ExitRequestException + + if not self.database().execute(q): + raise RuntimeException("ERROR: command '%s' failed\n" % q) + + if self.isExitRequested(): + raise ExitRequestException + + return True + + def runQuery(self, q): + if self.isExitRequested(): + raise ExitRequestException + + count = "-1" + + if not self.database().beginQuery(q): + raise RuntimeException("ERROR: command '%s' failed\n" % q) + + if self.database().fetchRow(): + count = self.database().getRowFieldString(0) + + self.database().endQuery() + + if self.isExitRequested(): + raise ExitRequestException + + return [count] + + def globalCount(self, table): + return int(self.runQuery("select count(*) from %s" % table)[0]) + + def usedCount(self, table): + return int(self.runQuery("select count(*) from %s where used=1" % table)[0]) + + def unusedCount(self, table): + return int(self.runQuery("select count(*) from %s where used=0" % table)[0]) + + def deleteChilds(self, *v): + count = int(self.runQuery( + self._query.childQuery("count", "Object", *v))[0]) + self.runCommand(self._query.childQuery("delete", "Object", *v)) + self.runCommand(self._query.childQuery("delete", None, *v)) + return count + + def deleteUnusedChilds(self, *v): + count = int(self.runQuery(self._query.childQuery( + "count", "Object", *v) + " and used=0")[0]) + self.runCommand(self._query.childQuery( + "delete", "Object", *v) + " and used=0") + self.runCommand(self._query.childQuery( + "delete", None, *v) + " and used=0") + return count + + def deleteUnusedPublicChilds(self, *v): + count = int(self.runQuery(self._query.childQuery( + "count", "Object", *v) + " and used=0")[0]) + self.runCommand(self._query.childJournalQuery( + "delete", "PublicObject", *v) + " and used=0") + self.runCommand(self._query.childQuery( + "delete", "Object", *v) + " and used=0") + self.runCommand(self._query.childQuery( + "delete", "PublicObject", *v) + " and used=0") + self.runCommand(self._query.childQuery( + "delete", None, *v) + " and used=0") + return count + + def deleteUnusedRawObjects(self, *v): + 
self.runCommand(self._query.deleteJournalQuery( + "PublicObject", *v) + " and used=0") + self.runCommand(self._query.deleteObjectQuery( + None, "Object", *v) + " and used=0") + self.runCommand(self._query.deleteObjectQuery( + None, "PublicObject", *v) + " and used=0") + return None + + def deleteObjects(self, *v): + self.runCommand(self._query.deleteJournalQuery("PublicObject", *v)) + self.runCommand(self._query.deleteObjectQuery("Object", *v)) + self.runCommand(self._query.deleteObjectQuery("PublicObject", *v)) + self.runCommand(self._query.deleteObjectQuery(None, *v)) + return None + + def deleteUnusedObjects(self, *v): + self.runCommand(self._query.deleteJournalQuery( + "PublicObject", *v) + " and used=0") + self.runCommand(self._query.deleteObjectQuery( + "Object", *v) + " and used=0") + self.runCommand(self._query.deleteObjectQuery( + "PublicObject", *v) + " and used=0") + self.runCommand(self._query.deleteObjectQuery( + None, *v) + " and used=0") + return None + + def delete(self, message, func, *v): + self.beginMessage(message) + count = func(*v) + self.endMessage(count) + return count + + +app = DBCleaner(len(sys.argv), sys.argv) +sys.exit(app()) diff --git a/bin/scdispatch b/bin/scdispatch new file mode 100755 index 0000000..64bf33a Binary files /dev/null and b/bin/scdispatch differ diff --git a/bin/scdumpcfg b/bin/scdumpcfg new file mode 100755 index 0000000..a477628 --- /dev/null +++ b/bin/scdumpcfg @@ -0,0 +1,238 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
# +############################################################################ + +from __future__ import division, print_function + +import sys +import os +import seiscomp.client +import seiscomp.datamodel +import seiscomp.config + + +def readParams(sc_params): + if sc_params.baseID(): + sc_params_base = seiscomp.datamodel.ParameterSet.Find( + sc_params.baseID()) + if sc_params_base is None: + sys.stderr.write("Warning: %s: base parameter set for %s not found\n" % ( + sc_params.baseID(), sc_params.publicID())) + params = {} + else: + params = readParams(sc_params_base) + else: + params = {} + + for i in range(sc_params.parameterCount()): + p = sc_params.parameter(i) + params[p.name()] = p.value() + + return params + + +class DumpCfg(seiscomp.client.Application): + def __init__(self, argc, argv): + if argc < 2: + sys.stderr.write("scdumpcfg {modname} [options]\n") + raise RuntimeError + + self.appName = argv[1] + + # Remove first parameter to replace appname with passed module name + argc = argc-1 + argv = argv[1:] + + seiscomp.client.Application.__init__(self, argc, argv) + + self.setMessagingEnabled(True) + self.setMessagingUsername("") + self.setDatabaseEnabled(True, True) + self.setLoadConfigModuleEnabled(True) + self.setDaemonEnabled(False) + + def createCommandLineDescription(self): + self.commandline().addGroup("Dump") + self.commandline().addStringOption("Dump", "param,P", + "Specify parameter name to filter for.") + self.commandline().addOption("Dump", "bindings,B", + "Dump bindings instead of module configuration.") + self.commandline().addOption("Dump", "allow-global,G", + "Print global bindings if no module binding is avaible.") + self.commandline().addOption("Dump", "cfg", + "Print output in .cfg format.") + self.commandline().addOption("Dump", "nslc", + "Print the list of streams which have bindings of the given module.") + + def validateParameters(self): + if not seiscomp.client.Application.validateParameters(self): + return False + + self.dumpBindings = self.commandline().hasOption("bindings") + + try: + self.param = self.commandline().optionString("param") + except: + self.param = None + + self.allowGlobal = self.commandline().hasOption("allow-global") + self.formatCfg = self.commandline().hasOption("cfg") + self.nslc = self.commandline().hasOption("nslc") + + if not self.dumpBindings: + self.setMessagingEnabled(False) + self.setDatabaseEnabled(False, False) + self.setLoadConfigModuleEnabled(False) + + return True + + def initConfiguration(self): + if self.appName == "-h" or self.appName == "--help": + self.printUsage() + return False + + return seiscomp.client.Application.initConfiguration(self) + + # Do nothing. 
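+    # Overriding initSubscriptions() to return True without calling the base
+    # implementation skips the default messaging subscriptions: scdumpcfg only
+    # reads configuration (and, for bindings, the database) and does not need
+    # to receive any messages.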
+ def initSubscriptions(self): + return True + + def printUsage(self): + + print('''Usage: + {} [options] + +Dump bindings or module configurations used by a specific module or global for +particular stations.'''.format(os.path.basename(__file__)), file=sys.stderr) + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +Dump global bindings configuration for all stations + {} global -d localhost -B > config.xml +'''.format(os.path.basename(__file__)), file=sys.stderr) + + + def run(self): + cfg = self.configuration() + if self.nslc: + nslc = set() + + if not self.dumpBindings: + symtab = cfg.symbolTable() + names = cfg.names() + count = 0 + for name in names: + if self.param and self.param != name: + continue + sym = symtab.get(name) + if self.formatCfg: + if sym.comment: + if count > 0: + sys.stdout.write("\n") + sys.stdout.write("%s\n" % sym.comment) + sys.stdout.write("%s = %s\n" % (sym.name, sym.content)) + else: + sys.stdout.write("%s\n" % sym.name) + sys.stdout.write(" value(s) : %s\n" % + ", ".join(sym.values)) + sys.stdout.write(" source : %s\n" % sym.uri) + count = count + 1 + + if self.param and count == 0: + sys.stderr.write("%s: definition not found\n." % self.param) + else: + cfg = self.configModule() + if cfg is None: + sys.stderr.write("No config module read\n") + return False + + tmp = {} + for i in range(cfg.configStationCount()): + cfg_sta = cfg.configStation(i) + tmp[(cfg_sta.networkCode(), cfg_sta.stationCode())] = cfg_sta + + name = self.name() + # For backward compatibility rename global to default + if name == "global": + name = "default" + + for item in sorted(tmp.keys()): + cfg_sta = tmp[item] + sta_enabled = cfg_sta.enabled() + cfg_setup = seiscomp.datamodel.findSetup( + cfg_sta, name, self.allowGlobal) + + if not cfg_setup is None: + suffix = "" + if sta_enabled and cfg_setup.enabled(): + out = "+ " + else: + suffix = " (" + if not sta_enabled: + suffix += "station disabled" + if not cfg_setup.enabled(): + if suffix: + suffix += ", " + suffix += "setup disabled" + suffix += ")" + out = "- " + out += "%s.%s%s\n" % (cfg_sta.networkCode(), + cfg_sta.stationCode(), suffix) + params = seiscomp.datamodel.ParameterSet.Find( + cfg_setup.parameterSetID()) + if params is None: + sys.stderr.write( + "ERROR: %s: ParameterSet not found\n" % + cfg_setup.parameterSetID()) + return False + + params = readParams(params) + if self.nslc: + try: + sensorLocation = params["detecLocid"] + except: + sensorLocation = "" + try: + detecStream = params["detecStream"] + except: + detecStream = "" + + stream = "%s.%s.%s.%s" % \ + (cfg_sta.networkCode(), cfg_sta.stationCode(), + sensorLocation, detecStream) + nslc.add(stream) + count = 0 + for param_name in sorted(params.keys()): + if self.param and self.param != param_name: + continue + out += " %s: %s\n" % (param_name, params[param_name]) + count = count + 1 + + if not self.nslc and count > 0: + sys.stdout.write(out) + + if self.nslc: + for stream in sorted(nslc): + print(stream, file=sys.stdout) + + return True + + +try: + app = DumpCfg(len(sys.argv), sys.argv) +except: + sys.exit(1) + +sys.exit(app()) diff --git a/bin/scdumpobject b/bin/scdumpobject new file mode 100755 index 0000000..c272dbc --- /dev/null +++ b/bin/scdumpobject @@ -0,0 +1,75 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. 
# +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. # +############################################################################ + +import sys +import seiscomp.client, seiscomp.datamodel, seiscomp.io + + +class ObjectDumper(seiscomp.client.Application): + + def __init__(self): + seiscomp.client.Application.__init__(self, len(sys.argv), sys.argv) + self.setMessagingEnabled(True) + self.setDatabaseEnabled(True, False) + self.setMessagingUsername("") + + def createCommandLineDescription(self): + seiscomp.client.Application.createCommandLineDescription(self) + self.commandline().addGroup("Dump") + self.commandline().addStringOption("Dump", "public-id,P", "publicID") + + def loadEventParametersObject(self, publicID): + for tp in \ + seiscomp.datamodel.Pick, seiscomp.datamodel.Amplitude, seiscomp.datamodel.Origin, \ + seiscomp.datamodel.Event, seiscomp.datamodel.FocalMechanism, \ + seiscomp.datamodel.Magnitude, seiscomp.datamodel.StationMagnitude: + + obj = self.query().loadObject(tp.TypeInfo(), publicID) + obj = tp.Cast(obj) + if obj: + ep = seiscomp.datamodel.EventParameters() + ep.add(obj) + return ep + + def loadInventoryObject(self, publicID): + for tp in \ + seiscomp.datamodel.Network, seiscomp.datamodel.Station, seiscomp.datamodel.Sensor, \ + seiscomp.datamodel.SensorLocation, seiscomp.datamodel.Stream: + + obj = self.query().loadObject(tp.TypeInfo(), publicID) + obj = tp.Cast(obj) + if obj: + return obj + + def run(self): + publicID = self.commandline().optionString("public-id") + obj = self.loadEventParametersObject(publicID) + if obj is None: + obj = self.loadInventoryObject(publicID) + if obj is None: + raise ValueError("unknown object '" + publicID + "'") + + # dump formatted XML archive to stdout + ar = seiscomp.io.XMLArchive() + ar.setFormattedOutput(True) + ar.create("-") + ar.writeObject(obj) + ar.close() + return True + + +if __name__ == "__main__": + app = ObjectDumper() + app() diff --git a/bin/sceplog b/bin/sceplog new file mode 100755 index 0000000..162534e --- /dev/null +++ b/bin/sceplog @@ -0,0 +1,76 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
# +############################################################################ + +import sys +import os +import seiscomp.client +import seiscomp.datamodel +import seiscomp.io + + +class EventParameterLog(seiscomp.client.Application): + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + + self.setMessagingEnabled(True) + self.setDatabaseEnabled(False, False) + self.setMessagingUsername("") + self.setPrimaryMessagingGroup( + seiscomp.client.Protocol.LISTENER_GROUP) + self.addMessagingSubscription("EVENT") + self.addMessagingSubscription("LOCATION") + self.addMessagingSubscription("MAGNITUDE") + self.addMessagingSubscription("AMPLITUDE") + self.addMessagingSubscription("PICK") + + self.setAutoApplyNotifierEnabled(True) + self.setInterpretNotifierEnabled(True) + + # EventParameter object + self._eventParameters = seiscomp.datamodel.EventParameters() + + def printUsage(self): + + print('''Usage: + sceplog [options] + +Receive event parameters from messaging and write them to stdout in SCML''') + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +Execute sceplog with debug output + sceplog --debug +''') + + def run(self): + if not seiscomp.client.Application.run(self): + return False + + ar = seiscomp.io.XMLArchive() + ar.setFormattedOutput(True) + if ar.create("-"): + ar.writeObject(self._eventParameters) + ar.close() + # Hack to avoid the "close failed in file object destructor" + # exception +# print "" + sys.stdout.write("\n") + + return True + + +app = EventParameterLog(len(sys.argv), sys.argv) +sys.exit(app()) diff --git a/bin/scesv b/bin/scesv new file mode 100755 index 0000000..2081b0f Binary files /dev/null and b/bin/scesv differ diff --git a/bin/scevent b/bin/scevent new file mode 100755 index 0000000..fcd9d1d Binary files /dev/null and b/bin/scevent differ diff --git a/bin/scevtlog b/bin/scevtlog new file mode 100755 index 0000000..19a9763 --- /dev/null +++ b/bin/scevtlog @@ -0,0 +1,850 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
# +############################################################################ + +import sys +import os +import traceback +import re +import seiscomp.core +import seiscomp.client +import seiscomp.datamodel +import seiscomp.io +import seiscomp.logging +import seiscomp.system + + +def time2str(time): + """ + Convert a seiscomp.core.Time to a string + """ + return time.toString("%Y-%m-%d %H:%M:%S.%f000000")[:23] + + +def createDirectory(dir): + if os.access(dir, os.W_OK): + return True + + try: + os.makedirs(dir) + return True + except: + return False + + +def originStatusToChar(org): + # Manual origin are always tagged as M + try: + if org.evaluationMode() == seiscomp.datamodel.MANUAL: + return 'M' + except: + pass + + try: + if org.evaluationStatus() == seiscomp.datamodel.PRELIMINARY: + return 'P' + elif org.evaluationStatus() == seiscomp.datamodel.CONFIRMED or \ + org.evaluationStatus() == seiscomp.datamodel.REVIEWED or \ + org.evaluationStatus() == seiscomp.datamodel.FINAL: + return 'C' + elif org.evaluationStatus() == seiscomp.datamodel.REJECTED: + return 'X' + elif org.evaluationStatus() == seiscomp.datamodel.REPORTED: + return 'R' + except: + pass + + return 'A' + + +class CachePopCallback(seiscomp.datamodel.CachePopCallback): + def __init__(self, target): + seiscomp.datamodel.CachePopCallback.__init__(self) + self.target = target + + def handle(self, obj): + self.target.objectAboutToPop(obj) + + +class EventHistory(seiscomp.client.Application): + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + seiscomp.datamodel.Notifier.SetEnabled(False) + + self.setMessagingEnabled(True) + self.setDatabaseEnabled(True, True) + self.setMessagingUsername("scevtlog") + self.setPrimaryMessagingGroup( + seiscomp.client.Protocol.LISTENER_GROUP) + self.addMessagingSubscription("EVENT") + self.addMessagingSubscription("LOCATION") + self.addMessagingSubscription("MAGNITUDE") + + self.setAutoApplyNotifierEnabled(True) + self.setInterpretNotifierEnabled(True) + + # Create a callback object that gets called when an object + # is going to be removed from the cache + self._popCallback = CachePopCallback(self) + + # Create an object cache of half an hour + self._cache = seiscomp.datamodel.PublicObjectTimeSpanBuffer( + self.query(), seiscomp.core.TimeSpan(30.0*60.0)) + self._cache.setPopCallback(self._popCallback) + + # Event progress counter + self._eventProgress = dict() + + # Event-Origin mapping + self._eventToOrg = dict() + self._orgToEvent = dict() + + # Event-Magnitude mapping + self._eventToMag = dict() + self._magToEvent = dict() + + self._directory = "@LOGDIR@/events" + self._format = "xml" + self._currentDirectory = "" + self._revisionFileExt = ".zip" + self._useGZIP = False + + def createCommandLineDescription(self): + try: + self.commandline().addGroup("Storage") + self.commandline().addStringOption( + "Storage", "directory,o", "Specify the storage directory. 
" + "Default: @LOGDIR@/events.") + self.commandline().addStringOption("Storage", "format,f", + "Specify storage format (autoloc1, autoloc3, xml [default])") + except: + seiscomp.logging.warning( + "caught unexpected error %s" % sys.exc_info()) + return True + + def initConfiguration(self): + if not seiscomp.client.Application.initConfiguration(self): + return False + + try: + self._directory = self.configGetString("directory") + except: + pass + + try: + self._format = self.configGetString("format") + except: + pass + + try: + if self.configGetBool("gzip"): + self._useGZIP = True + self._revisionFileExt = ".gz" + except: + pass + + return True + + def printUsage(self): + print('''Usage: + scevtlog [options] + +Save event history into files''') + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +Execute on command line with debug output + scevtlog --debug +''') + + def init(self): + if not seiscomp.client.Application.init(self): + return False + + try: + self._directory = self.commandline().optionString("directory") + except: + pass + + try: + self._format = self.commandline().optionString("format") + except: + pass + + if self._format != "autoloc1" and self._format != "autoloc3" and self._format != "xml": + self._format = "xml" + + try: + if self._directory[-1] != "/": + self._directory = self._directory + "/" + except: + pass + + if self._directory: + self._directory = seiscomp.system.Environment.Instance().absolutePath(self._directory) + sys.stderr.write("Logging events to %s\n" % self._directory) + + self._cache.setDatabaseArchive(self.query()) + return True + + # def run(self): + # obj = self._cache.get(seiscomp.datamodel.Magnitude, "or080221153929#16#netMag.mb") + + # self.updateObject(obj) + # return True + + def done(self): + seiscomp.client.Application.done(self) + self._cache.setDatabaseArchive(None) + + def printEvent(self, evt, newEvent): + if self._format != "xml": + self.printEventProcAlert(evt, newEvent) + else: + self.printEventXML(evt, newEvent) + self.advanceEventProgress(evt.publicID()) + + def getSummary(self, time, org, mag): + strTime = time.toString("%Y-%m-%d %H:%M:%S") + summary = [strTime, "", "", "", "", "", "", "", "", ""] + + if org: + tim = org.time().value() + latency = time - tim + + summary[1] = "%5d.%02d" % ( + latency.seconds() / 60, (latency.seconds() % 60) * 100 / 60) + + lat = org.latitude().value() + lon = org.longitude().value() + + dep = "%7s" % "---" + try: + dep = "%7.0f" % org.depth().value() + summary[4] = dep + except: + summary[4] = "%7s" % "" + + phases = "%5s" % "---" + try: + phases = "%5d" % org.quality().usedPhaseCount() + summary[5] = phases + except: + summary[5] = "%5s" % "" + + summary[2] = "%7.2f" % lat + summary[3] = "%7.2f" % lon + + try: + summary[9] = originStatusToChar(org) + except: + summary[9] = "-" + + if mag: + summary[6] = "%12s" % mag.type() + summary[7] = "%5.2f" % mag.magnitude().value() + try: + summary[8] = "%5d" % mag.stationCount() + except: + summary[8] = " " + else: + summary[6] = "%12s" % "" + summary[7] = " " + summary[8] = " " + + return summary + + def printEventProcAlert(self, evt, newEvent): + now = seiscomp.core.Time.GMT() + + org = self._cache.get(seiscomp.datamodel.Origin, + evt.preferredOriginID()) + prefmag = self._cache.get( + seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID()) + + summary = self.getSummary(now, org, prefmag) + + # Load arrivals + if org.arrivalCount() == 0: + self.query().loadArrivals(org) + + # Load station magnitudes + if org.stationMagnitudeCount() == 0: + 
self.query().loadStationMagnitudes(org) + + # Load magnitudes + if org.magnitudeCount() == 0: + self.query().loadMagnitudes(org) + + picks = [] + amps = [] + + if org: + narr = org.arrivalCount() + for i in range(narr): + picks.append(self._cache.get( + seiscomp.datamodel.Pick, org.arrival(i).pickID())) + + nstamags = org.stationMagnitudeCount() + for i in range(nstamags): + amps.append(self._cache.get( + seiscomp.datamodel.Amplitude, org.stationMagnitude(i).amplitudeID())) + + netmag = {} + nmag = org.magnitudeCount() + + bulletin = seiscomp.scbulletin.Bulletin(None, self._format) + try: + txt = bulletin.printEvent(evt) + except: + txt = "" + + if self._directory is None: + sys.stdout.write("%s" % ("#<\n" + txt + "#>\n")) + sys.stdout.flush() + else: + # Use created time to look up the proper directory + try: + arNow = evt.creationInfo().creationTime().get() + # Otherwise use now (in case that event.created has not been set + # which is always valid within the SC3 distribution + except: + arNow = now.get() + seiscomp.logging.error("directory is " + self._directory + "/".join( + ["%.2d" % i for i in arNow[1:4]]) + "/" + evt.publicID() + "/") + + directory = self._directory + \ + "/".join(["%.2d" % i for i in arNow[1:4]]) + \ + "/" + evt.publicID() + "/" + if directory != self._currentDirectory: + if createDirectory(directory) == False: + seiscomp.logging.error( + "Unable to create directory %s" % directory) + return + + self._currentDirectory = directory + self.writeLog(self._currentDirectory + self.convertID(evt.publicID()) + + "." + ("%06d" % self.eventProgress(evt.publicID(), directory)), txt, "w") + self.writeLog(self._currentDirectory + + self.convertID(evt.publicID()) + ".last", txt, "w") + self.writeLog(self._directory + "last", txt, "w") + self.writeLog(self._currentDirectory + self.convertID(evt.publicID()) + ".summary", + "|".join(summary), "a", + "# Layout: Timestamp, +OT (minutes, decimal), Latitude, Longitude, Depth, PhaseCount, MagType, Magnitude, MagCount") + + seiscomp.logging.info("cache size = %d" % self._cache.size()) + + def printEventXML(self, evt, newEvent): + now = seiscomp.core.Time.GMT() + + # Load comments + if evt.commentCount() == 0: + self.query().loadComments(evt) + + # Load origin references + if evt.originReferenceCount() == 0: + self.query().loadOriginReferences(evt) + + # Load event descriptions + if evt.eventDescriptionCount() == 0: + self.query().loadEventDescriptions(evt) + + org = self._cache.get(seiscomp.datamodel.Origin, + evt.preferredOriginID()) + + if evt.preferredFocalMechanismID(): + fm = self._cache.get( + seiscomp.datamodel.FocalMechanism, evt.preferredFocalMechanismID()) + else: + fm = None + + # Load comments + if org.commentCount() == 0: + self.query().loadComments(org) + + # Load arrivals + if org.arrivalCount() == 0: + self.query().loadArrivals(org) + prefmag = self._cache.get( + seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID()) + + wasEnabled = seiscomp.datamodel.PublicObject.IsRegistrationEnabled() + seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False) + + ep = seiscomp.datamodel.EventParameters() + evt_cloned = seiscomp.datamodel.Event.Cast(evt.clone()) + ep.add(evt_cloned) + + summary = self.getSummary(now, org, prefmag) + + if fm: + ep.add(fm) + + seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled) + + # Load focal mechainsm references + if evt.focalMechanismReferenceCount() == 0: + self.query().loadFocalMechanismReferences(evt) + + # Load moment tensors + if fm.momentTensorCount() == 0: + 
self.query().loadMomentTensors(fm) + + seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False) + + # Copy focal mechanism reference + fm_ref = evt.focalMechanismReference( + seiscomp.datamodel.FocalMechanismReferenceIndex(fm.publicID())) + if fm_ref: + fm_ref_cloned = seiscomp.datamodel.FocalMechanismReference.Cast( + fm_ref.clone()) + if fm_ref_cloned is None: + fm_ref_cloned = seiscomp.datamodel.FocalMechanismReference( + fm.publicID()) + evt_cloned.add(fm_ref_cloned) + + nmt = fm.momentTensorCount() + for i in range(nmt): + mt = fm.momentTensor(i) + if not mt.derivedOriginID(): + continue + + # Origin already added + if ep.findOrigin(mt.derivedOriginID()) is not None: + continue + + seiscomp.datamodel.PublicObject.SetRegistrationEnabled( + wasEnabled) + derivedOrigin = self._cache.get( + seiscomp.datamodel.Origin, mt.derivedOriginID()) + seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False) + + if derivedOrigin is None: + seiscomp.logging.warning( + "derived origin for MT %s not found" % mt.derivedOriginID()) + continue + + # Origin has been read from database -> read all childs + if not self._cache.cached(): + seiscomp.datamodel.PublicObject.SetRegistrationEnabled( + wasEnabled) + self.query().load(derivedOrigin) + seiscomp.datamodel.PublicObject.SetRegistrationEnabled( + False) + + # Add it to the event parameters + ep.add(derivedOrigin) + + if org: + seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled) + + # Load magnitudes + if org.magnitudeCount() == 0: + self.query().loadMagnitudes(org) + + if org.stationMagnitudeCount() == 0: + self.query().loadStationMagnitudes(org) + + seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False) + + # Copy event comments + ncmts = evt.commentCount() + for i in range(ncmts): + cmt_cloned = seiscomp.datamodel.Comment.Cast( + evt.comment(i).clone()) + evt_cloned.add(cmt_cloned) + + # Copy origin references + org_ref = evt.originReference( + seiscomp.datamodel.OriginReferenceIndex(org.publicID())) + if org_ref: + org_ref_cloned = seiscomp.datamodel.OriginReference.Cast( + org_ref.clone()) + if org_ref_cloned is None: + org_ref_cloned = seiscomp.datamodel.OriginReference( + org.publicID()) + evt_cloned.add(org_ref_cloned) + + # Copy event descriptions + for i in range(evt.eventDescriptionCount()): + ed_cloned = seiscomp.datamodel.EventDescription.Cast( + evt.eventDescription(i).clone()) + evt_cloned.add(ed_cloned) + + org_cloned = seiscomp.datamodel.Origin.Cast(org.clone()) + ep.add(org_cloned) + + # Copy origin comments + ncmts = org.commentCount() + for i in range(ncmts): + cmt_cloned = seiscomp.datamodel.Comment.Cast( + org.comment(i).clone()) + org_cloned.add(cmt_cloned) + + # Copy arrivals + narr = org.arrivalCount() + for i in range(narr): + arr_cloned = seiscomp.datamodel.Arrival.Cast( + org.arrival(i).clone()) + org_cloned.add(arr_cloned) + + seiscomp.datamodel.PublicObject.SetRegistrationEnabled( + wasEnabled) + pick = self._cache.get( + seiscomp.datamodel.Pick, arr_cloned.pickID()) + seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False) + + if pick: + pick_cloned = seiscomp.datamodel.Pick.Cast(pick.clone()) + ep.add(pick_cloned) + + # Copy network magnitudes + nmag = org.magnitudeCount() + for i in range(nmag): + mag = org.magnitude(i) + + mag_cloned = seiscomp.datamodel.Magnitude.Cast(mag.clone()) + + seiscomp.datamodel.PublicObject.SetRegistrationEnabled( + wasEnabled) + if mag.stationMagnitudeContributionCount() == 0: + self.query().loadStationMagnitudeContributions(mag) + 
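            # Registration is toggled throughout this method: it is switched
            # off while objects are cloned (so the copies are not registered
            # under the same publicIDs as the originals) and switched back on
            # only around the cache and database look-ups above and below.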
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False) + + # Copy magnitude references + nmagref = mag.stationMagnitudeContributionCount() + for j in range(nmagref): + mag_ref_cloned = seiscomp.datamodel.StationMagnitudeContribution.Cast( + mag.stationMagnitudeContribution(j).clone()) + mag_cloned.add(mag_ref_cloned) + + org_cloned.add(mag_cloned) + + # Copy station magnitudes and station amplitudes + smag = org.stationMagnitudeCount() + amp_map = dict() + for i in range(smag): + mag_cloned = seiscomp.datamodel.StationMagnitude.Cast( + org.stationMagnitude(i).clone()) + org_cloned.add(mag_cloned) + if (mag_cloned.amplitudeID() in amp_map) == False: + amp_map[mag_cloned.amplitudeID()] = True + seiscomp.datamodel.PublicObject.SetRegistrationEnabled( + wasEnabled) + amp = self._cache.get( + seiscomp.datamodel.Amplitude, mag_cloned.amplitudeID()) + seiscomp.datamodel.PublicObject.SetRegistrationEnabled( + False) + if amp: + amp_cloned = seiscomp.datamodel.Amplitude.Cast( + amp.clone()) + ep.add(amp_cloned) + + seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled) + + # archive.create(event.publicID() + ) + ar = seiscomp.io.XMLArchive() + ar.setFormattedOutput(True) + + if self._directory is None: + sys.stdout.write("#<\n") + ar.create("-") + ar.writeObject(ep) + ar.close() + sys.stdout.write("#>\n") + sys.stdout.flush() + else: + # Use created time to look up the proper directory + try: + arNow = evt.creationInfo().creationTime().get() + # Otherwise use now (in case that event.created has not been set + # which is always valid within the SC3 distribution + except: + arNow = now.get() + + directory = self._directory + \ + "/".join(["%.2d" % i for i in arNow[1:4]]) + \ + "/" + evt.publicID() + "/" + if directory != self._currentDirectory: + if createDirectory(directory) == False: + seiscomp.logging.error( + "Unable to create directory %s" % directory) + return + + self._currentDirectory = directory + # self.writeLog(self._currentDirectory + evt.publicID(), "#<\n" + txt + "#>\n") + #self.writeLog(self._currentDirectory + evt.publicID() + ".last", txt, "w") + ar.create(self._currentDirectory + self.convertID(evt.publicID()) + "." 
+ ("%06d" % + self.eventProgress(evt.publicID(), directory)) + ".xml" + self._revisionFileExt) + ar.setCompression(True) + if self._useGZIP: + ar.setCompressionMethod(seiscomp.io.XMLArchive.GZIP) + ar.writeObject(ep) + ar.close() + # Write last file to root + ar.create(self._directory + "last.xml" + self._revisionFileExt) + ar.setCompression(True) + if self._useGZIP: + ar.setCompressionMethod(seiscomp.io.XMLArchive.GZIP) + ar.writeObject(ep) + ar.close() + # Write last xml + ar.create(self._currentDirectory + + self.convertID(evt.publicID()) + ".last.xml") + ar.setCompression(False) + ar.writeObject(ep) + ar.close() + self.writeLog(self._currentDirectory + self.convertID(evt.publicID()) + ".summary", + "|".join(summary), "a", + "# Layout: Timestamp, +OT (minutes, decimal), Latitude, Longitude, Depth, PhaseCount, MagType, Magnitude, MagCount") + + del ep + + def convertID(self, id): + '''Converts an ID containing slashes to one without slashes''' + p = re.compile('/') + return p.sub('_', id) + + def writeLog(self, file, text, mode="a", header=None): + of = open(file, mode) + if of: + if of.tell() == 0 and not header is None: + of.write(header+"\n") + of.write(text+"\n") + of.close() + else: + seiscomp.logging.error("Unable to write file: %s" % file) + + def objectAboutToPop(self, obj): + try: + evt = seiscomp.datamodel.Event.Cast(obj) + if evt: + try: + self._orgToEvent.pop(evt.preferredOriginID()) + self._eventToOrg.pop(evt.publicID()) + + self._magToEvent.pop(evt.preferredMagnitudeID()) + self._eventToMag.pop(evt.publicID()) + + self._eventProgress.pop(evt.publicID()) + return + except: + pass + + org = seiscomp.datamodel.Origin.Cast(obj) + if org: + try: + self._orgToEvent.pop(org.publicID()) + except: + pass + return + + mag = seiscomp.datamodel.Magnitude.Cast(obj) + if mag: + try: + self._magToEvent.pop(mag.publicID()) + except: + pass + return + except: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + sys.exit(-1) + + def eventProgress(self, evtID, directory): + # The progress is already stored + if evtID in self._eventProgress: + return self._eventProgress[evtID] + + # Find the maximum file counter + maxid = -1 + files = os.listdir(directory) + for file in files: + if os.path.isfile(directory + file) == False: + continue + fid = file[len(evtID + '.'):len(file)] + sep = fid.find('.') + if sep == -1: + sep = len(fid) + fid = fid[0:sep] + try: + nid = int(fid) + except: + continue + if nid > maxid: + maxid = nid + + maxid = maxid + 1 + self._eventProgress[evtID] = maxid + return maxid + + def advanceEventProgress(self, evtID): + try: + self._eventProgress[evtID] = self._eventProgress[evtID] + 1 + except: + pass + + def addObject(self, parentID, object): + try: + obj = seiscomp.datamodel.Event.Cast(object) + if obj: + self._cache.feed(obj) + self._eventProgress[obj.publicID()] = 0 + self.printEvent(obj, True) + self.updateCache(obj) + return + + # New Magnitudes or Origins are not important for + # the history update but we feed it into the cache to + # access them faster later on in case they will become + # preferred entities + obj = seiscomp.datamodel.Magnitude.Cast(object) + if obj: + self._cache.feed(obj) + return + + obj = seiscomp.datamodel.Origin.Cast(object) + if obj: + self._cache.feed(obj) + return + + obj = seiscomp.datamodel.Pick.Cast(object) + if obj: + self._cache.feed(obj) + return + + obj = seiscomp.datamodel.Amplitude.Cast(object) + if obj: + self._cache.feed(obj) + return + + except: + info = 
traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + sys.exit(-1) + + def updateObject(self, parentID, object): + try: + obj = seiscomp.datamodel.Event.Cast(object) + if obj: + self._cache.feed(obj) + self.printEvent(obj, False) + self.updateCache(obj) + return + + # Updates of a Magnitude are only imported when it is + # the preferred one. + obj = seiscomp.datamodel.Magnitude.Cast(object) + if obj: + try: + evtID = self._magToEvent[obj.publicID()] + if evtID: + self._cache.feed(obj) + evt = self._cache.get(seiscomp.datamodel.Event, evtID) + if evt: + self.printEvent(evt, False) + else: + sys.stderr.write("Unable to fetch event for ID '%s' while update of magnitude '%s'\n" % ( + evtID, obj.publicID())) + else: + # Magnitude has not been associated to an event yet + pass + except: + # Search the corresponding event from the database + evt = self.query().getEventByPreferredMagnitudeID(obj.publicID()) + # Associate the event (even if None) with the magnitude ID + if evt: + self._magToEvent[obj.publicID()] = evt.publicID() + self._cache.feed(obj) + self.printEvent(evt, False) + else: + self._magToEvent[obj.publicID()] = None + return + + # Usually we do not update origins. To have it complete, + # this case will be supported as well + obj = seiscomp.datamodel.Origin.Cast(object) + if obj: + try: + evtID = self._orgToEvent[obj.publicID()] + if evtID: + self._cache.feed(obj) + evt = self._cache.get(seiscomp.datamodel.Event, evtID) + if evt: + self.printEvent(evt, False) + else: + sys.stderr.write("Unable to fetch event for ID '%s' while update of origin '%s'\n" % ( + evtID, obj.publicID())) + else: + # Origin has not been associated to an event yet + pass + except: + # Search the corresponding event from the database + evt = self.query().getEvent(obj.publicID()) + if evt: + if evt.preferredOriginID() != obj.publicID(): + evt = None + + # Associate the event (even if None) with the origin ID + if evt: + self._orgToEvent[obj.publicID()] = evt.publicID() + self._cache.feed(obj) + self.printEvent(evt, False) + else: + self._orgToEvent[obj.publicID()] = None + return + + return + + except: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + sys.exit(-1) + + def updateCache(self, evt): + # Event-Origin update + try: + orgID = self._eventToOrg[evt.publicID()] + if orgID != evt.preferredOriginID(): + self._orgToEvent.pop(orgID) + except: + # origin not yet registered + pass + + # Bind the current preferred origin ID to the event and + # vice versa + self._orgToEvent[evt.preferredOriginID()] = evt.publicID() + self._eventToOrg[evt.publicID()] = evt.preferredOriginID() + + # Event-Magnitude update + try: + magID = self._eventToMag[evt.publicID()] + if magID != evt.preferredMagnitudeID(): + self._magToEvent.pop(magID) + except: + # not yet registered + pass + + # Bind the current preferred magnitude ID to the event and + # vice versa + self._magToEvent[evt.preferredMagnitudeID()] = evt.publicID() + self._eventToMag[evt.publicID()] = evt.preferredMagnitudeID() + + +app = EventHistory(len(sys.argv), sys.argv) +sys.exit(app()) diff --git a/bin/scevtls b/bin/scevtls new file mode 100755 index 0000000..d59871d --- /dev/null +++ b/bin/scevtls @@ -0,0 +1,197 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. 
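# A stand-alone sketch of the cache mechanism scevtlog (above) builds on: a
# PublicObjectTimeSpanBuffer keeps objects for a fixed time span and invokes a
# CachePopCallback just before an object expires. The 30-minute span mirrors
# the value used there; passing None instead of a database query is an
# assumption made only to keep this illustration self-contained.
import seiscomp.core
import seiscomp.datamodel


class LoggingPopCallback(seiscomp.datamodel.CachePopCallback):
    def handle(self, obj):
        # called for each object that is about to fall out of the buffer
        print("expiring %s" % obj.publicID())


callback = LoggingPopCallback()
cache = seiscomp.datamodel.PublicObjectTimeSpanBuffer(
    None, seiscomp.core.TimeSpan(30.0 * 60.0))
cache.setPopCallback(callback)  # keep a reference to the callback object alive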
# +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. # +############################################################################ + +import sys +import seiscomp.core +import seiscomp.client +import seiscomp.datamodel +import seiscomp.logging + + +def _parseTime(timestring): + t = seiscomp.core.Time() + if t.fromString(timestring, "%F %T"): + return t + if t.fromString(timestring, "%FT%T"): + return t + if t.fromString(timestring, "%FT%TZ"): + return t + return None + + +class EventList(seiscomp.client.Application): + + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + + self.setMessagingEnabled(False) + self.setDatabaseEnabled(True, False) + self.setDaemonEnabled(False) + + self._startTime = None + self._endTime = None + self.hours = None + self._delimiter = None + self._modifiedAfterTime = None + self._preferredOrigin = False + + def createCommandLineDescription(self): + self.commandline().addGroup("Events") + self.commandline().addStringOption("Events", "begin", + "Specify the lower bound of the " + "time interval.") + self.commandline().addStringOption("Events", "end", + "Specify the upper bound of the " + "time interval.") + self.commandline().addStringOption("Events", "hours", + "Start searching given hours before" + " now. If set, --begin and --end " + "are ignored.") + self.commandline().addStringOption("Events", "modified-after", + "Select events modified after the " + "specified time.") + + self.commandline().addGroup("Output") + self.commandline().addStringOption("Output", "delimiter,D", + "Specify the delimiter of the " + "resulting event IDs. " + "Default: '\\n')") + self.commandline().addOption("Output", "preferred-origin,p", + "Print the ID of the preferred origin " + "along with the event ID.") + return True + + def init(self): + if not seiscomp.client.Application.init(self): + return False + + try: + self.hours = float(self.commandline().optionString("hours")) + except RuntimeError: + pass + + end = "2500-01-01T00:00:00Z" + if self.hours is None: + try: + start = self.commandline().optionString("begin") + except RuntimeError: + start = "1900-01-01T00:00:00Z" + + self._startTime = _parseTime(start) + if self._startTime is None: + seiscomp.logging.error("Wrong 'begin' format '%s'" % start) + return False + seiscomp.logging.debug("Setting start to %s" + % self._startTime.toString("%FT%TZ")) + + try: + end = self.commandline().optionString("end") + except RuntimeError: + pass + + self._endTime = _parseTime(end) + if self._endTime is None: + seiscomp.logging.error("Wrong 'end' format '%s'" % end) + return False + seiscomp.logging.debug("Setting end to %s" + % self._endTime.toString("%FT%TZ")) + else: + seiscomp.logging.debug("Time window set by hours option: ignoring " + "all other time parameters") + secs = self.hours*3600 + maxSecs = 596523 * 3600 + if secs > maxSecs: + seiscomp.logging.error("Maximum hours exceeeded. 
Maximum is %i" % (maxSecs / 3600)) + return False + + self._startTime = seiscomp.core.Time.UTC() - seiscomp.core.TimeSpan(secs) + self._endTime = _parseTime(end) + + try: + self._delimiter = self.commandline().optionString("delimiter") + except RuntimeError: + self._delimiter = "\n" + + try: + modifiedAfter = self.commandline().optionString("modified-after") + self._modifiedAfterTime = _parseTime(modifiedAfter) + if self._modifiedAfterTime is None: + seiscomp.logging.error("Wrong 'modified-after' format '%s'" + % modifiedAfter) + return False + seiscomp.logging.debug( + "Setting 'modified-after' time to %s" % + self._modifiedAfterTime.toString("%FT%TZ")) + except RuntimeError: + pass + + try: + self._preferredOrigin = self.commandline().hasOption("preferred-origin") + except RuntimeError: + pass + + return True + + def printUsage(self): + + print('''Usage: + scevtls [options] + +List event IDs available in a given time range and print to stdout.''') + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +Print all event IDs from year 2022 and thereafter + scevtls -d mysql://sysop:sysop@localhost/seiscomp --begin "2022-01-01 00:00:00" +''') + + def run(self): + out = [] + seiscomp.logging.debug("Search interval: %s - %s" % + (self._startTime, self._endTime)) + for obj in self.query().getEvents(self._startTime, self._endTime): + evt = seiscomp.datamodel.Event.Cast(obj) + if not evt: + continue + + if self._modifiedAfterTime is not None: + try: + if evt.creationInfo().modificationTime() < self._modifiedAfterTime: + continue + except ValueError: + continue + + outputString = evt.publicID() + if self._preferredOrigin: + try: + outputString += " " + evt.preferredOriginID() + except ValueError: + outputString += " none" + + out.append(outputString) + + sys.stdout.write("%s\n" % self._delimiter.join(out)) + + return True + + +def main(): + app = EventList(len(sys.argv), sys.argv) + app() + + +if __name__ == "__main__": + main() diff --git a/bin/scevtstreams b/bin/scevtstreams new file mode 100755 index 0000000..61c4493 --- /dev/null +++ b/bin/scevtstreams @@ -0,0 +1,432 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
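# A compact sketch of the time handling used by scevtls (above): the parser
# accepts the three formats tried in _parseTime(), and --hours derives the
# start time by subtracting a TimeSpan from the current UTC time. The concrete
# time string and the 12-hour value are made up for illustration.
import seiscomp.core


def parse_time(timestring):
    t = seiscomp.core.Time()
    for fmt in ("%F %T", "%FT%T", "%FT%TZ"):
        if t.fromString(timestring, fmt):
            return t
    return None


t = parse_time("2022-01-01T00:00:00Z")
if t:
    print(t.toString("%FT%TZ"))

# equivalent of --hours 12: search from twelve hours before now, end left open
start = seiscomp.core.Time.UTC() - seiscomp.core.TimeSpan(12.0 * 3600.0)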
# +############################################################################ + +from __future__ import absolute_import, division, print_function + +import sys + +from seiscomp import client, core, datamodel, io + + +class EventStreams(client.Application): + + def __init__(self, argc, argv): + client.Application.__init__(self, argc, argv) + + self.setMessagingEnabled(False) + self.setDatabaseEnabled(True, False) + self.setDaemonEnabled(False) + + self.eventID = None + self.inputFile = None + self.inputFormat = "xml" + self.margin = [300] + + self.allNetworks = True + self.allStations = True + self.allLocations = True + self.allStreams = True + self.allComponents = True + + # filter + self.network = None + self.station = None + + self.streams = [] + + # output format + self.caps = False + self.fdsnws = False + + + def createCommandLineDescription(self): + self.commandline().addGroup("Input") + self.commandline().addStringOption( + "Input", "input,i", + "read event from XML file instead of database. Use '-' to read " + "from stdin.") + self.commandline().addStringOption( + "Input", "format,f", + "input format to use (xml [default], zxml (zipped xml), binary). " + "Only relevant with --input.") + + self.commandline().addGroup("Dump") + self.commandline().addStringOption("Dump", "event,E", "event id") + self.commandline().addStringOption( + "Dump", "margin,m", + "time margin around the picked time window, default is 300. Added " + "before the first and after the last pick, respectively. Use 2 " + "comma-separted values (before,after) for asymmetric margins, e.g. " + "-m 120,300.") + self.commandline().addStringOption( + "Dump", "streams,S", + "comma separated list of streams per station to add, e.g. BH,SH,HH") + self.commandline().addOption( + "Dump", "all-streams", + "dump all streams. If unused, just streams with picks are dumped.") + self.commandline().addIntOption( + "Dump", "all-components,C", + "all components or just the picked ones (0). Default is 1") + self.commandline().addIntOption( + "Dump", "all-locations,L", + "all locations or just the picked ones (0). Default is 1") + self.commandline().addOption( + "Dump", "all-stations", + "dump all stations from the same network. If unused, just stations " + "with picks are dumped.") + self.commandline().addOption( + "Dump", "all-networks", + "dump all networks. If unused, just networks with picks are dumped." + " This option implies all-stations, all-locations, all-streams, " + "all-components and will only provide the time window.") + self.commandline().addOption( + "Dump", "resolve-wildcards,R", + "if all components are used, use inventory to resolve stream " + "components instead of using '?' (important when Arclink should be " + "used)") + self.commandline().addStringOption( + "Dump", "net-sta", "Filter streams by network code or network and " + "station code. 
Format: NET or NET.STA") + self.commandline().addOption( + "Dump", "caps", + "dump in capstool format (Common Acquisition Protocol Server by " + "gempa GmbH)") + self.commandline().addOption( + "Dump", "fdsnws", + "dump in FDSN dataselect webservice POST format") + return True + + + def validateParameters(self): + if not client.Application.validateParameters(self): + return False + + if self.commandline().hasOption("resolve-wildcards"): + self.setLoadStationsEnabled(True) + + try: + self.inputFile = self.commandline().optionString("input") + self.setDatabaseEnabled(False, False) + except BaseException: + pass + + return True + + + def init(self): + + if not client.Application.init(self): + return False + + try: + self.inputFormat = self.commandline().optionString("format") + except BaseException: + pass + + try: + self.eventID = self.commandline().optionString("event") + except BaseException: + if not self.inputFile: + raise ValueError("An eventID is mandatory if no input file is " + "specified") + + try: + self.margin = self.commandline().optionString("margin").split(",") + except BaseException: + pass + + try: + self.streams = self.commandline().optionString("streams").split(",") + except BaseException: + pass + + try: + self.allComponents = self.commandline().optionInt("all-components") != 0 + except BaseException: + pass + + try: + self.allLocations = self.commandline().optionInt("all-locations") != 0 + except BaseException: + pass + + self.allStreams = self.commandline().hasOption("all-streams") + self.allStations = self.commandline().hasOption("all-stations") + self.allNetworks = self.commandline().hasOption("all-networks") + + try: + networkStation = self.commandline().optionString("net-sta") + except RuntimeError: + networkStation = None + + if networkStation: + try: + self.network = networkStation.split('.')[0] + except IndexError: + print("Error in network code '{}': Use '--net-sta' with " + "format NET or NET.STA".format(networkStation), file=sys.stderr) + return False + + try: + self.station = networkStation.split('.')[1] + except IndexError: + pass + + self.caps = self.commandline().hasOption("caps") + self.fdsnws = self.commandline().hasOption("fdsnws") + + return True + + + def printUsage(self): + + print('''Usage: + scevtstreams [options] + +Extract stream information and time windows from an event''') + + client.Application.printUsage(self) + + print('''Examples: +Get the time windows for an event in the database: + scevtstreams -E gfz2012abcd -d mysql://sysop:sysop@localhost/seiscomp + +Create lists compatible with fdsnws: + scevtstreams -E gfz2012abcd -i event.xml -m 120,500 --fdsnws +''') + + def run(self): + + resolveWildcards = self.commandline().hasOption("resolve-wildcards") + + picks = [] + + # read picks from input file + if self.inputFile: + picks = self.readXML() + if not picks: + raise ValueError("Could not find picks in input file") + + # read picks from database + else: + for obj in self.query().getEventPicks(self.eventID): + pick = datamodel.Pick.Cast(obj) + if pick is None: + continue + picks.append(pick) + + if not picks: + raise ValueError("Could not find picks for event {} in " + "database".format(self.eventID)) + + # filter picks + pickFiltered = [] + if self.network: + for pick in picks: + if pick.waveformID().networkCode() != self.network: + continue + if self.station and self.station != pick.waveformID().stationCode(): + continue + pickFiltered.append(pick) + + picks = pickFiltered + + if not picks: + raise ValueError("All picks filtered out") + + # 
calculate minimum and maximum pick time + minTime = None + maxTime = None + for pick in picks: + if minTime is None or minTime > pick.time().value(): + minTime = pick.time().value() + + if maxTime is None or maxTime < pick.time().value(): + maxTime = pick.time().value() + + # add time margin(s), no need for None check since pick time is + # mandatory and at least on pick exists + minTime = minTime - core.TimeSpan(float(self.margin[0])) + maxTime = maxTime + core.TimeSpan(float(self.margin[-1])) + + # convert times to string dependend on requested output format + if self.caps: + timeFMT = "%Y,%m,%d,%H,%M,%S" + elif self.fdsnws: + timeFMT = "%FT%T" + else: + timeFMT = "%F %T" + minTime = minTime.toString(timeFMT) + maxTime = maxTime.toString(timeFMT) + + inv = client.Inventory.Instance().inventory() + + lines = set() + for pick in picks: + net = pick.waveformID().networkCode() + station = pick.waveformID().stationCode() + loc = pick.waveformID().locationCode() + streams = [pick.waveformID().channelCode()] + rawStream = streams[0][:2] + + if self.allComponents: + if resolveWildcards: + iloc = datamodel.getSensorLocation(inv, pick) + if iloc: + tc = datamodel.ThreeComponents() + datamodel.getThreeComponents( + tc, iloc, rawStream, pick.time().value()) + streams = [] + if tc.vertical(): + streams.append(tc.vertical().code()) + if tc.firstHorizontal(): + streams.append(tc.firstHorizontal().code()) + if tc.secondHorizontal(): + streams.append(tc.secondHorizontal().code()) + else: + streams = [rawStream + "?"] + + if self.allLocations: + loc = "*" + + if self.allStations: + station = "*" + + if self.allNetworks: + net = "*" + station = "*" + loc = "*" + + # FDSNWS requires empty location to be encoded by 2 dashes + if not loc and self.fdsnws: + loc = "--" + + # line format + if self.caps: + lineFMT = "{0} {1} {2} {3} {4} {5}" + elif self.fdsnws: + lineFMT = "{2} {3} {4} {5} {0} {1}" + else: + lineFMT = "{0};{1};{2}.{3}.{4}.{5}" + + for s in streams: + if self.allStreams or self.allNetworks: + s = "*" + + lines.add(lineFMT.format( + minTime, maxTime, net, station, loc, s)) + + for s in self.streams: + if s == rawStream: + continue + + if self.allStreams or self.allNetworks: + s = "*" + + lines.add(lineFMT.format( + minTime, maxTime, net, station, loc, s + streams[0][2])) + + for line in sorted(lines): + print(line, file=sys.stdout) + + return True + + + def readXML(self): + + if self.inputFormat == "xml": + ar = io.XMLArchive() + elif self.inputFormat == "zxml": + ar = io.XMLArchive() + ar.setCompression(True) + elif self.inputFormat == "binary": + ar = io.VBinaryArchive() + else: + raise TypeError("unknown input format '{}'".format( + self.inputFormat)) + + if not ar.open(self.inputFile): + raise IOError("unable to open input file") + + obj = ar.readObject() + if obj is None: + raise TypeError("invalid input file format") + + ep = datamodel.EventParameters.Cast(obj) + if ep is None: + raise ValueError("no event parameters found in input file") + + # we require at least one origin which references to picks via arrivals + if ep.originCount() == 0: + raise ValueError("no origin found in input file") + + originIDs = [] + + # search for a specific event id + if self.eventID: + ev = datamodel.Event.Find(self.eventID) + if ev: + originIDs = [ev.originReference(i).originID() \ + for i in range(ev.originReferenceCount())] + else: + raise ValueError("event id {} not found in input file".format( + self.eventID)) + + # use first event/origin if no id was specified + else: + # no event, use first available 
origin + if ep.eventCount() == 0: + if ep.originCount() > 1: + print("WARNING: Input file contains no event but more than " + "1 origin. Considering only first origin", + file=sys.stderr) + originIDs.append(ep.origin(0).publicID()) + + # use origin references of first available event + else: + if ep.eventCount() > 1: + print("WARNING: Input file contains more than 1 event. " + "Considering only first event", file=sys.stderr) + ev = ep.event(0) + originIDs = [ev.originReference(i).originID() \ + for i in range(ev.originReferenceCount())] + + # collect pickIDs + pickIDs = set() + for oID in originIDs: + o = datamodel.Origin.Find(oID) + if o is None: + continue + + for i in range(o.arrivalCount()): + pickIDs.add(o.arrival(i).pickID()) + + # lookup picks + picks = [] + for pickID in pickIDs: + pick = datamodel.Pick.Find(pickID) + if pick: + picks.append(pick) + + return picks + + +if __name__ == '__main__': + try: + app = EventStreams(len(sys.argv), sys.argv) + sys.exit(app()) + except (ValueError, TypeError) as e: + print("ERROR: {}".format(e), file=sys.stderr) + sys.exit(1) diff --git a/bin/scgitinit b/bin/scgitinit new file mode 100755 index 0000000..6c25b47 --- /dev/null +++ b/bin/scgitinit @@ -0,0 +1,38 @@ +#!/bin/bash +# Initializes a GIT repository in $SEISCOMP_ROOT and adds important +# configuration files from 'etc' and 'share' directory +# +# Author: Stephan Herrnkind + + +# search for SeisComP path +if [ x"$SEISCOMP_ROOT" = x ]; then + echo "SEISCOMP_ROOT not set" + exit 1 +fi + +# search git binary +which git > /dev/null +if [ $? -ne 0 ]; then + echo "git binary not found" + exit 2 +fi + +cd $SEISCOMP_ROOT || exit 3 + +# initialize git if necessary +[ -d .git ] || git rev-parse --git-dir > /dev/null 2>&1 +if [ $? -eq 0 ]; then + echo "GIT repository in $SEISCOMP_ROOT already initialized" +else + git init || exit 4 +fi + +# add files +git add etc +find share -type f -regex \ + ".*\.\(bna\|cfg\|conf\|htaccess\|kml\|py\|sh\|tpl\|tvel\|txt\|xml\)" \ + -execdir git add {} + + +echo "files added to GIT, use 'git status' to get an overview and " \ + "'git commit' to commit them" diff --git a/bin/scheli b/bin/scheli new file mode 100755 index 0000000..db78ecc Binary files /dev/null and b/bin/scheli differ diff --git a/bin/scimex b/bin/scimex new file mode 100755 index 0000000..f4286f3 Binary files /dev/null and b/bin/scimex differ diff --git a/bin/scimport b/bin/scimport new file mode 100755 index 0000000..e386f0d Binary files /dev/null and b/bin/scimport differ diff --git a/bin/scinv b/bin/scinv new file mode 100755 index 0000000..697cbe9 Binary files /dev/null and b/bin/scinv differ diff --git a/bin/scm b/bin/scm new file mode 100755 index 0000000..f142781 Binary files /dev/null and b/bin/scm differ diff --git a/bin/scmag b/bin/scmag new file mode 100755 index 0000000..44788e6 Binary files /dev/null and b/bin/scmag differ diff --git a/bin/scmapcut b/bin/scmapcut new file mode 100755 index 0000000..d18646f Binary files /dev/null and b/bin/scmapcut differ diff --git a/bin/scmaster b/bin/scmaster new file mode 100755 index 0000000..e16b81c Binary files /dev/null and b/bin/scmaster differ diff --git a/bin/scml2inv b/bin/scml2inv new file mode 100755 index 0000000..a315edf --- /dev/null +++ b/bin/scml2inv @@ -0,0 +1,84 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. 
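# A reduced sketch of what scevtstreams' readXML() boils down to for a document
# with plain origins: collect the pick IDs referenced by the arrivals. The file
# name "event.xml" follows the --input example above but is otherwise arbitrary.
import seiscomp.datamodel
import seiscomp.io

ar = seiscomp.io.XMLArchive()
if ar.open("event.xml"):
    ep = seiscomp.datamodel.EventParameters.Cast(ar.readObject())
    ar.close()
    if ep:
        pickIDs = set()
        for i in range(ep.originCount()):
            org = ep.origin(i)
            for j in range(org.arrivalCount()):
                pickIDs.add(org.arrival(j).pickID())
        print("%d pick(s) referenced" % len(pickIDs))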
# +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. # +############################################################################ + +import sys +import getopt +import seiscomp.io +import seiscomp.datamodel + + +usage = """scml2inv [options] input output=stdout + +Options: + -h [ --help ] Produce help message + -f Enable formatted XML output +""" + + +def main(argv): + formatted = False + + # parse command line options + try: + opts, args = getopt.getopt(argv[1:], "hf", ["help"]) + except getopt.error as msg: + sys.stderr.write("%s\n" % msg) + sys.stderr.write("for help use --help\n") + return 1 + + for o, a in opts: + if o in ["-h", "--help"]: + sys.stderr.write("%s\n" % usage) + return 1 + elif o in ["-f"]: + formatted = True + + argv = args + if len(argv) < 1: + sys.stderr.write("Missing input file\n") + return 1 + + ar = seiscomp.io.XMLArchive() + if not ar.open(argv[0]): + sys.stderr.write("Unable to parse input file: %s\n" % argv[0]) + return 2 + + obj = ar.readObject() + ar.close() + + if obj is None: + sys.stderr.write("Empty document in %s\n" % argv[0]) + return 3 + + inv = seiscomp.datamodel.Inventory.Cast(obj) + if inv is None: + sys.stderr.write("No inventory found in %s\n" % argv[0]) + return 4 + + if len(argv) < 2: + output_file = "-" + else: + output_file = argv[1] + + ar.create(output_file) + ar.setFormattedOutput(formatted) + ar.writeObject(inv) + ar.close() + + return 0 + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/bin/scmm b/bin/scmm new file mode 100755 index 0000000..7ecef2e Binary files /dev/null and b/bin/scmm differ diff --git a/bin/scmssort b/bin/scmssort new file mode 100755 index 0000000..26c5482 --- /dev/null +++ b/bin/scmssort @@ -0,0 +1,416 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
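# A small follow-up sketch: an inventory document such as the one scml2inv
# (above) writes can be read back and walked with the same pattern.
# "inventory.xml" is a placeholder name, and the networkCount()/stationCount()
# accessors are assumed to follow the usual SeisComP child-accessor convention.
import seiscomp.datamodel
import seiscomp.io

ar = seiscomp.io.XMLArchive()
if ar.open("inventory.xml"):
    inv = seiscomp.datamodel.Inventory.Cast(ar.readObject())
    ar.close()
    if inv:
        for i in range(inv.networkCount()):
            net = inv.network(i)
            print("%s: %d station(s)" % (net.code(), net.stationCount()))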
# +############################################################################ + +from __future__ import absolute_import, division, print_function + +import sys +import os +import re +import argparse +import seiscomp.core +import seiscomp.io + + +class MyArgumentParser(argparse.ArgumentParser): + def format_epilog(self): + return self.epilog + + +def str2time(timestring): + """ + Liberally accept many time string formats and convert them to a + seiscomp.core.Time + """ + + timestring = timestring.strip() + for c in ["-", "/", ":", "T", "Z"]: + timestring = timestring.replace(c, " ") + timestring = timestring.split() + assert 3 <= len(timestring) <= 6 + timestring.extend((6 - len(timestring)) * ["0"]) + timestring = " ".join(timestring) + timeFormat = "%Y %m %d %H %M %S" + if timestring.find(".") != -1: + timeFormat += ".%f" + + time = seiscomp.core.Time() + time.fromString(timestring, timeFormat) + return time + + +def time2str(time): + """ + Convert a seiscomp.core.Time to a string + """ + return time.toString("%Y-%m-%d %H:%M:%S.%f000000")[:23] + + +def recordInput(filename=None, datatype=seiscomp.core.Array.INT): + """ + Simple Record iterator that reads from a file (to be specified by + filename) or -- if no filename was specified -- reads from standard input + """ + + stream = seiscomp.io.RecordStream.Create("file") + if not stream: + raise IOError("failed to create a RecordStream") + + if not filename: + filename = "-" + + if filename == "-": + print( + "Waiting for data input from stdin. Use Ctrl + C to interrupt.", + file=sys.stderr, + ) + else: + if not os.path.exists(filename): + print("Cannot find file {}".format(filename), file=sys.stderr) + sys.exit() + + if not stream.setSource(filename): + print(" + failed to assign source file to RecordStream", file=sys.stderr) + sys.exit() + + records = seiscomp.io.RecordInput(stream, datatype, seiscomp.core.Record.SAVE_RAW) + + while True: + try: + record = next(records) + except Exception: + print("Received invalid or no input", file=sys.stderr) + sys.exit() + + if not record: + return + yield record + + +tmin = str2time("1970-01-01 00:00:00") +tmax = str2time("2500-01-01 00:00:00") +ifile = "-" + +description = ( + "Read unsorted and possibly multiplexed miniSEED files. " + "Sort data by time (multiplexing) and filter the individual " + "records by time and/or streams. Apply this before playbacks " + "and waveform archiving." +) + +epilog = ( + "Examples:\n" + "Read data from multiple files, extract streams by time, sort records by start " + "time, remove duplicate records\n" + " cat f1.mseed f2.mseed f3.mseed |\\\n" + " scmssort -v -t '2007-03-28 15:48~2007-03-28 16:18' -u > sorted.mseed\n" + "\n" + "Extract streams by time, stream code and sort records by end time\n" + " echo CX.PB01..BH? |\\ \n" + " scmssort -v -E -t '2007-03-28 15:48~2007-03-28 16:18' -u -l - test.mseed > " + "sorted.mseed" +) + + +# p = MyArgumentParser( +# usage="\n %prog [options] [files | < ] > ", description=description, epilog=epilog +# ) +p = MyArgumentParser( + description=description, + epilog=epilog, + formatter_class=argparse.RawDescriptionHelpFormatter, +) +p.add_argument( + "-E", + "--sort-by-end-time", + action="store_true", + help="Sort according to record end time; default is start time.", +) +p.add_argument( + "-r", + "--rm", + action="store_true", + help="Remove all traces in stream list given by --list instead of keeping them.", +) +p.add_argument( + "-l", + "--list", + action="store", + help="File with stream list to filter the records. 
" + "One stream per line. Instead of a file read the from stdin (-). " + "Line format: NET.STA.LOC.CHA - wildcards and regular expressions " + "are considered. Example: CX.*..BH?.", +) +p.add_argument( + "-t", + "--time-window", + action="store", + help="Specify time window (as one -properly quoted- string). Times " + "are of course UTC and separated by a tilde '~'.", +) +p.add_argument( + "-u", + "--uniqueness", + action="store_true", + help="Ensure uniqueness of output, i.e. skip duplicate records.", +) +p.add_argument("-v", "--verbose", action="store_true", help="Run in verbose mode.") + +p.add_argument( + "filenames", + nargs="+", + help="Names of input files in miniSEED format.", +) +opt = p.parse_args() +filenames = opt.filenames + +if opt.time_window: + tmin, tmax = list(map(str2time, opt.time_window.split("~"))) + +if opt.verbose: + print( + "Considered time window: %s~%s" % (time2str(tmin), time2str(tmax)), + file=sys.stderr, + ) + +listFile = None +removeStreams = False +if opt.list: + listFile = opt.list + print("Considered stream list from: %s" % (listFile), file=sys.stderr) + + if opt.rm: + removeStreams = True + print("Removing listed streams", file=sys.stderr) + + +def _time(record): + if opt.sort_by_end_time: + return seiscomp.core.Time(record.endTime()) + return seiscomp.core.Time(record.startTime()) + + +def _in_time_window(record, tMin, tMax): + return record.endTime() >= tMin and record.startTime() <= tMax + + +def readStreamList(file): + streamList = [] + + try: + if file == "-": + f = sys.stdin + file = "stdin" + else: + f = open(listFile, "r", encoding="utf-8") + except FileNotFoundError: + print("%s: error: unable to open" % listFile, file=sys.stderr) + return [] + + lineNumber = -1 + for line in f: + lineNumber = lineNumber + 1 + line = line.strip() + # ignore comments + if len(line) > 0 and line[0] == "#": + continue + + if len(line) == 0: + continue + + toks = line.split(".") + if len(toks) != 4: + f.close() + print( + "error: %s in line %d has invalid line format, expected " + "stream list: NET.STA.LOC.CHA - 1 line per stream including " + "regular expressions" % (listFile, lineNumber), + file=sys.stderr, + ) + return [] + + streamList.append(line) + + f.close() + + if len(streamList) == 0: + return [] + + return streamList + + +if not filenames: + filenames = ["-"] + +streams = None +if listFile: + streams = readStreamList(listFile) + if not streams and not removeStreams: + print(" + cannot extract data", file=sys.stderr) + sys.exit() + + if opt.verbose: + string = " + streams: " + + for stream in streams: + string += stream + " " + print("%s" % (string), file=sys.stderr) + + pattern = re.compile("|".join(streams)) + +readRecords = 0 +networks = set() +stations = set() +locations = set() +channels = set() +readStreams = set() +outEnd = None +outStart = None + +if filenames: + first = None + time_raw = [] + for fileName in filenames: + if opt.verbose: + print("Reading file '%s'" % fileName, file=sys.stderr) + + for rec in recordInput(fileName): + if not rec: + continue + + if not _in_time_window(rec, tmin, tmax): + continue + + raw = rec.raw().str() + streamCode = "%s.%s.%s.%s" % ( + rec.networkCode(), + rec.stationCode(), + rec.locationCode(), + rec.channelCode(), + ) + + if listFile: + foundStream = False + + if pattern.match(streamCode): + foundStream = True + + if removeStreams: + foundStream = not foundStream + + if not foundStream: + continue + + # collect statistics for verbosity mode + if opt.verbose: + networks.add(rec.networkCode()) + 
stations.add(rec.stationCode()) + locations.add(rec.locationCode()) + channels.add(rec.channelCode()) + readStreams.add(streamCode) + readRecords += 1 + + start = rec.startTime() + end = rec.endTime() + + if (outStart is None) or (start < outStart): + outStart = seiscomp.core.Time(start) + + if (outEnd is None) or (end > outEnd): + outEnd = seiscomp.core.Time(end) + + t = _time(rec) + if first is None: + first = t + t = float(t - first) # float needs less memory + time_raw.append((t, raw)) + + if opt.verbose: + print( + " + %d networks, %d stations, %d sensor locations, " + "%d channel codes, %d streams, %d records" + % ( + len(networks), + len(stations), + len(locations), + len(channels), + len(readStreams), + readRecords, + ), + file=sys.stderr, + ) + print("Sorting records", file=sys.stderr) + time_raw.sort() + + if opt.verbose: + print("Writing output", file=sys.stderr) + previous = None + + out = sys.stdout + try: + # needed in Python 3, fails in Python 2 + out = out.buffer + except AttributeError: + # assuming this is Python 2, nothing to be done + pass + + duplicates = 0 + for item in time_raw: + if item == previous: + duplicates += 1 + if opt.uniqueness: + continue + + t, raw = item + out.write(raw) + + previous = item + + if opt.verbose: + print("Finished", file=sys.stderr) + if opt.uniqueness: + print( + " + found and removed {} duplicate records".format(duplicates), + file=sys.stderr, + ) + else: + if duplicates > 0: + print( + " + found {} duplicate records - remove with: scmssort -u".format( + duplicates + ), + file=sys.stderr, + ) + else: + print(" + found 0 duplicate records", file=sys.stderr) + + print("Output:", file=sys.stderr) + if outStart and outEnd: + print( + " + time window: %s~%s" + % (seiscomp.core.Time(outStart), seiscomp.core.Time(outEnd)), + file=sys.stderr, + ) + else: + print("No data found in time window", file=sys.stderr) + + else: + # This is an important hint which should always be printed + if duplicates > 0 and not opt.uniqueness: + print( + "Found {} duplicate records - remove with: scmssort -u".format( + duplicates + ), + file=sys.stderr, + ) diff --git a/bin/scmv b/bin/scmv new file mode 100755 index 0000000..d3079d5 Binary files /dev/null and b/bin/scmv differ diff --git a/bin/scolv b/bin/scolv new file mode 100755 index 0000000..940bf3f Binary files /dev/null and b/bin/scolv differ diff --git a/bin/scorg2nll b/bin/scorg2nll new file mode 100755 index 0000000..66a72aa Binary files /dev/null and b/bin/scorg2nll differ diff --git a/bin/scorgls b/bin/scorgls new file mode 100755 index 0000000..9ddc67f --- /dev/null +++ b/bin/scorgls @@ -0,0 +1,131 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
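# A sketch of how scmssort (above) applies --list entries: the lines are joined
# into a single regular expression and matched against NET.STA.LOC.CHA codes
# (with --rm inverting the result). The list entry is taken from the help text;
# the second stream code is made up.
import re

streams = ["CX.*..BH?"]  # entries as they would appear in a stream-list file
pattern = re.compile("|".join(streams))

for streamCode in ("CX.PB01..BHZ", "GE.LVC.10.HHZ"):
    kept = pattern.match(streamCode) is not None
    print("%s -> %s" % (streamCode, "kept" if kept else "dropped"))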
# +############################################################################ + +import sys +import seiscomp.core +import seiscomp.client +import seiscomp.datamodel + + +class OriginList(seiscomp.client.Application): + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + + self.setMessagingEnabled(False) + self.setDatabaseEnabled(True, False) + self.setDaemonEnabled(False) + + self._startTime = seiscomp.core.Time() + self._endTime = seiscomp.core.Time.GMT() + self._delimiter = None + + def createCommandLineDescription(self): + self.commandline().addGroup("Origins") + self.commandline().addStringOption("Origins", "begin", + "The lower bound of the time interval. Format: '1970-01-01 00:00:00'.") + self.commandline().addStringOption("Origins", "end", + "The upper bound of the time interval. Format: '1970-01-01 00:00:00'.") + self.commandline().addStringOption("Origins", "author", + "The author of the origins.") + + self.commandline().addGroup("Output") + self.commandline().addStringOption("Output", "delimiter,D", + "The delimiter of the resulting " + "origin IDs. Default: '\\n')") + return True + + def init(self): + if not seiscomp.client.Application.init(self): + return False + + try: + start = self.commandline().optionString("begin") + if not self._startTime.fromString(start, "%F %T"): + print("Wrong 'begin' given -> assuming {}" + .format(self._startTime), file=sys.stderr) + except RuntimeError: + print("No 'begin' given -> assuming {}".format(self._startTime), + file=sys.stderr) + + try: + end = self.commandline().optionString("end") + if not self._endTime.fromString(end, "%F %T"): + print("Wrong 'end' given -> assuming {}" + .format(self._endTime), file=sys.stderr) + except RuntimeError: + print("No 'end' given -> assuming {}".format(self._endTime), + file=sys.stderr) + + try: + self.author = self.commandline().optionString("author") + sys.stderr.write("%s author used for output\n" % (self.author)) + except RuntimeError: + self.author = False + + try: + self._delimiter = self.commandline().optionString("delimiter") + except RuntimeError: + self._delimiter = "\n" + +# sys.stderr.write("Setting end to %s\n" % self._endTime.toString("%F %T")) + + return True + + def printUsage(self): + + print('''Usage: + scorgls [options] + +List origin IDs available in a given time range and print to stdout.''') + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +Print all origin IDs from year 2022 and thereafter + scorgls -d mysql://sysop:sysop@localhost/seiscomp --begin "2022-01-01 00:00:00" +''') + + def run(self): + seiscomp.logging.debug("Search interval: %s - %s" % + (self._startTime, self._endTime)) + out = [] + q = "select PublicObject.%s, Origin.* from Origin, PublicObject where Origin._oid=PublicObject._oid and Origin.%s >= '%s' and Origin.%s < '%s'" %\ + (self.database().convertColumnName("publicID"), + self.database().convertColumnName("time_value"), + self.database().timeToString(self._startTime), + self.database().convertColumnName("time_value"), + self.database().timeToString(self._endTime)) + + if self.author: + q += " and Origin.%s = '%s' " %\ + (self.database().convertColumnName("creationInfo_author"), + self.query().toString(self.author)) + + for obj in self.query().getObjectIterator(q, seiscomp.datamodel.Origin.TypeInfo()): + org = seiscomp.datamodel.Origin.Cast(obj) + if org: + out.append(org.publicID()) + + print("{}\n".format(self._delimiter.join(out)), file=sys.stdout) + return True + + +def main(): + app = 
OriginList(len(sys.argv), sys.argv) + app() + + +if __name__ == "__main__": + main() diff --git a/bin/scplot b/bin/scplot new file mode 100755 index 0000000..4bb1c83 Binary files /dev/null and b/bin/scplot differ diff --git a/bin/scproclat b/bin/scproclat new file mode 100755 index 0000000..908c0a5 --- /dev/null +++ b/bin/scproclat @@ -0,0 +1,328 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. # +############################################################################ + +import time, sys, os, traceback +import seiscomp.core, seiscomp.client, seiscomp.datamodel +import seiscomp.logging, seiscomp.system + + +def createDirectory(dir): + if os.access(dir, os.W_OK): + return True + + try: + os.makedirs(dir) + return True + except: + return False + + +def timeToString(t): + return t.toString("%T.%6f") + + +def timeSpanToString(ts): + neg = ts.seconds() < 0 or ts.microseconds() < 0 + secs = abs(ts.seconds()) + days = secs / 86400 + daySecs = secs % 86400 + hours = daySecs / 3600 + hourSecs = daySecs % 3600 + mins = hourSecs / 60 + secs = hourSecs % 60 + usecs = abs(ts.microseconds()) + + if neg: + return "-%.2d:%.2d:%.2d:%.2d.%06d" % (days, hours, mins, secs, usecs) + else: + return "%.2d:%.2d:%.2d:%.2d.%06d" % (days, hours, mins, secs, usecs) + + +class ProcLatency(seiscomp.client.Application): + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + + self.setMessagingEnabled(True) + self.setDatabaseEnabled(False, False) + + self.setAutoApplyNotifierEnabled(False) + self.setInterpretNotifierEnabled(True) + + self.addMessagingSubscription("PICK") + self.addMessagingSubscription("AMPLITUDE") + self.addMessagingSubscription("LOCATION") + self.addMessagingSubscription("MAGNITUDE") + self.addMessagingSubscription("EVENT") + + self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP) + + self._directory = "" + self._nowDirectory = "" + self._triggeredDirectory = "" + self._logCreated = False + + def createCommandLineDescription(self): + try: + self.commandline().addGroup("Storage") + self.commandline().addStringOption( + "Storage", "directory,o", "Specify the storage directory") + except: + seiscomp.logging.warning( + "caught unexpected error %s" % sys.exc_info()) + + def initConfiguration(self): + if not seiscomp.client.Application.initConfiguration(self): + return False + + try: + self._directory = self.configGetString("directory") + except: + pass + + try: + self._logCreated = self.configGetBool("logMsgLatency") + except: + pass + + return True + + def init(self): + if not seiscomp.client.Application.init(self): + return False + + try: + self._directory = self.commandline().optionString("directory") + except: + pass + + try: + if self._directory[-1] != "/": + self._directory = self._directory + "/" + except: + pass + + if self._directory: + self._directory = seiscomp.system.Environment.Instance().absolutePath(self._directory) + sys.stderr.write("Logging latencies to 
%s\n" % self._directory) + + return True + + def addObject(self, parentID, obj): + try: + self.logObject(parentID, obj, False) + except: + sys.stderr.write("%s\n" % traceback.format_exc()) + + def updateObject(self, parentID, obj): + try: + self.logObject("", obj, True) + except: + sys.stderr.write("%s\n" % traceback.format_exc()) + + def logObject(self, parentID, obj, update): + now = seiscomp.core.Time.GMT() + time = None + + pick = seiscomp.datamodel.Pick.Cast(obj) + if pick: + phase = "" + try: + phase = pick.phaseHint().code() + except: + pass + + created = None + if self._logCreated: + try: + created = pick.creationInfo().creationTime() + except: + pass + + self.logStation(now, created, pick.time().value( + ), pick.publicID() + ";P;" + phase, pick.waveformID(), update) + return + + amp = seiscomp.datamodel.Amplitude.Cast(obj) + if amp: + created = None + if self._logCreated: + try: + created = amp.creationInfo().creationTime() + except: + pass + + try: + self.logStation(now, created, amp.timeWindow().reference(), amp.publicID( + ) + ";A;" + amp.type() + ";" + "%.2f" % amp.amplitude().value(), amp.waveformID(), update) + except: + pass + return + + org = seiscomp.datamodel.Origin.Cast(obj) + if org: + status = "" + lat = "%.2f" % org.latitude().value() + lon = "%.2f" % org.longitude().value() + try: + depth = "%d" % org.depth().value() + except: + pass + + try: + status = seiscomp.datamodel.EOriginStatusNames.name( + org.status()) + except: + pass + + self.logFile(now, org.time().value(), org.publicID( + ) + ";O;" + status + ";" + lat + ";" + lon + ";" + depth, update) + return + + mag = seiscomp.datamodel.Magnitude.Cast(obj) + if mag: + count = "" + try: + count = "%d" % mag.stationCount() + except: + pass + self.logFile(now, None, mag.publicID() + ";M;" + mag.type() + + ";" + "%.4f" % mag.magnitude().value() + ";" + count, update) + return + + orgref = seiscomp.datamodel.OriginReference.Cast(obj) + if orgref: + self.logFile(now, None, parentID + ";OR;" + + orgref.originID(), update) + return + + evt = seiscomp.datamodel.Event.Cast(obj) + if evt: + self.logFile(now, None, evt.publicID( + ) + ";E;" + evt.preferredOriginID() + ";" + evt.preferredMagnitudeID(), update) + return + + def logStation(self, received, created, triggered, text, waveformID, update): + streamID = waveformID.networkCode() + "." + waveformID.stationCode() + "." + \ + waveformID.locationCode() + "." 
+ waveformID.channelCode() + + aNow = received.get() + aTriggered = triggered.get() + + nowDirectory = self._directory + \ + "/".join(["%.2d" % i for i in aNow[1:4]]) + "/" + triggeredDirectory = self._directory + \ + "/".join(["%.2d" % i for i in aTriggered[1:4]]) + "/" + + logEntry = timeSpanToString(received - triggered) + ";" + if created is not None: + logEntry = logEntry + timeSpanToString(received - created) + ";" + else: + logEntry = logEntry + ";" + + if update: + logEntry = logEntry + "U" + else: + logEntry = logEntry + "A" + + logEntry = logEntry + ";" + text + + sys.stdout.write("%s;%s\n" % (timeToString(received), logEntry)) + + if nowDirectory != self._nowDirectory: + if createDirectory(nowDirectory) == False: + seiscomp.logging.error( + "Unable to create directory %s" % nowDirectory) + return False + + self._nowDirectory = nowDirectory + + self.writeLog(self._nowDirectory + streamID + ".rcv", + timeToString(received) + ";" + logEntry) + + if triggeredDirectory != self._triggeredDirectory: + if createDirectory(triggeredDirectory) == False: + seiscomp.logging.error( + "Unable to create directory %s" % triggeredDirectory) + return False + + self._triggeredDirectory = triggeredDirectory + + self.writeLog(self._triggeredDirectory + streamID + + ".trg", timeToString(triggered) + ";" + logEntry) + + return True + + def logFile(self, received, triggered, text, update): + aNow = received.get() + nowDirectory = self._directory + \ + "/".join(["%.2d" % i for i in aNow[1:4]]) + "/" + triggeredDirectory = None + + #logEntry = timeToString(received) + logEntry = "" + + if not triggered is None: + aTriggered = triggered.get() + triggeredDirectory = self._directory + \ + "/".join(["%.2d" % i for i in aTriggered[1:4]]) + "/" + + logEntry = logEntry + timeSpanToString(received - triggered) + + logEntry = logEntry + ";" + + if update: + logEntry = logEntry + "U" + else: + logEntry = logEntry + "A" + + logEntry = logEntry + ";" + text + + sys.stdout.write("%s;%s\n" % (timeToString(received), logEntry)) + + if nowDirectory != self._nowDirectory: + if createDirectory(nowDirectory) == False: + seiscomp.logging.error( + "Unable to create directory %s" % nowDirectory) + return False + + self._nowDirectory = nowDirectory + + self.writeLog(self._nowDirectory + "objects.rcv", + timeToString(received) + ";" + logEntry) + + if triggeredDirectory: + if triggeredDirectory != self._triggeredDirectory: + if createDirectory(triggeredDirectory) == False: + seiscomp.logging.error( + "Unable to create directory %s" % triggeredDirectory) + return False + + self._triggeredDirectory = triggeredDirectory + + self.writeLog(self._triggeredDirectory + "objects.trg", + timeToString(triggered) + ";" + logEntry) + + return True + + def writeLog(self, file, text): + of = open(file, "a") + if of: + of.write(text) + of.write("\n") + of.close() + + +app = ProcLatency(len(sys.argv), sys.argv) +sys.exit(app()) diff --git a/bin/scqc b/bin/scqc new file mode 100755 index 0000000..9c25211 Binary files /dev/null and b/bin/scqc differ diff --git a/bin/scqcv b/bin/scqcv new file mode 100755 index 0000000..13f7bb3 Binary files /dev/null and b/bin/scqcv differ diff --git a/bin/scquery b/bin/scquery new file mode 100755 index 0000000..7c84319 Binary files /dev/null and b/bin/scquery differ diff --git a/bin/scqueryqc b/bin/scqueryqc new file mode 100755 index 0000000..568449b --- /dev/null +++ b/bin/scqueryqc @@ -0,0 +1,252 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- 
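+#
+# Usage sketch (values are illustrative, assuming a local database; see the
+# examples in printUsage() below for the script's own reference invocation):
+#   scqueryqc -d localhost -b '2021-11-20 00:00:00' -p rms,delay \
+#             -i AU.AS18..SHZ,AU.AS19..SHZ -o qc.xml
+# Streams can alternatively be taken from the inventory with
+# --streams-from-inventory; wildcards in stream IDs are rejected.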
+############################################################################ +# Copyright (C) 2021 by gempa GmbH # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. # +# # +# adopted from scqcquery # +# Author: Dirk Roessler, gempa GmbH # +# Email: roessler@gempa.de # +############################################################################ + +from __future__ import absolute_import, division, print_function + +import sys +import re +import seiscomp.core +import seiscomp.client +import seiscomp.io +import seiscomp.datamodel + +qcParamsDefault = "latency,delay,timing,offset,rms,availability,"\ + "'gaps count','gaps interval','gaps length',"\ + "'overlaps count','overlaps interval','overlaps length',"\ + "'spikes count','spikes interval','spikes amplitude'" + + +def getStreamsFromInventory(self): + try: + dbr = seiscomp.datamodel.DatabaseReader(self.database()) + inv = seiscomp.datamodel.Inventory() + dbr.loadNetworks(inv) + + streamList = set() + for inet in range(inv.networkCount()): + network = inv.network(inet) + dbr.load(network) + for ista in range(network.stationCount()): + station = network.station(ista) + try: + start = station.start() + except Exception: + continue + try: + end = station.end() + if not start <= self._end <= end and end >= self._start: + continue + except Exception: + pass + + for iloc in range(station.sensorLocationCount()): + location = station.sensorLocation(iloc) + for istr in range(location.streamCount()): + stream = location.stream(istr) + streamID = network.code() + "." + station.code() \ + + "." + location.code() + "." + stream.code() + streamList.add(streamID) + + return list(streamList) + + except Exception: + return False + + +class WfqQuery(seiscomp.client.Application): + + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + + self.setMessagingEnabled(False) + self.setDatabaseEnabled(True, False) + self.setLoggingToStdErr(True) + self.setDaemonEnabled(False) + + self._streams = False + self._fromInventory = False + self._outfile = '-' + self._parameter = qcParamsDefault + self._start = "1900-01-01T00:00:00Z" + self._end = str(seiscomp.core.Time.GMT()) + self._formatted = False + + def createCommandLineDescription(self): + self.commandline().addGroup("Output") + self.commandline().addStringOption("Output", "output,o", + "output file name for XML. Writes " + "to stdout if not given.") + self.commandline().addOption("Output", "formatted,f", + "write formatted XML") + + self.commandline().addGroup("Query") + self.commandline().addStringOption( + "Query", "begin,b", "Begin time of query: 'YYYY-MM-DD hh:mm:ss'") + self.commandline().addStringOption( + "Query", "end,e", "End time of query: 'YYYY-MM-DD hh:mm:ss'") + self.commandline().addStringOption( + "Query", "stream-id,i", + "Waveform stream ID to search for QC parameters: net.sta.loc.cha -" + " [networkCode].[stationCode].[sensorLocationCode].[channelCode]. " + "Provide a single ID or a comma-separated list. Overrides " + "--streams-from-inventory") + self.commandline().addStringOption( + "Query", "parameter,p", + "QC parameter to output: (e.g. 
delay, rms, 'gaps count' ...). " + "Provide a single parameter or a comma-separated list. Defaults " + "apply if parameter is not given.") + self.commandline().addOption("Query", "streams-from-inventory", + "Read streams from inventory. Superseded" + " by stream-id.") + + + return True + + def printUsage(self): + print('''Usage: + scqueryqc [options] + +Query a database for waveform quality control (QC) parameters.''', file=sys.stderr) + + seiscomp.client.Application.printUsage(self) + + print('''Default QC parameters: {} + '''.format(qcParamsDefault), file=sys.stderr) + print('''Examples: +Query rms and delay values for streams 'AU.AS18..SHZ' and 'AU.AS19..SHZ' from '2021-11-20 00:00:00' until current + scqueryqc -d localhost -b '2021-11-20 00:00:00' -p rms,delay -i AU.AS18..SHZ,AU.AS19..SHZ + ''', file=sys.stderr) + + def validateParameters(self): + if not seiscomp.client.Application.validateParameters(self): + return False + + try: + self._streams = self.commandline().optionString("stream-id").split(",") + except RuntimeError: + pass + + try: + self._fromInventory = self.commandline().hasOption("streams-from-inventory") + except RuntimeError: + pass + + if not self._streams and not self._fromInventory: + print("Provide streamID(s): --stream-id or --streams-from-inventory", + file=sys.stderr) + return False + + try: + self._outfile = self.commandline().optionString("output") + except RuntimeError: + print("No output file name given: Sending to stdout", + file=sys.stderr) + + try: + self._start = self.commandline().optionString("begin") + except RuntimeError: + print("No begin time given, considering: {}".format(self._start), + file=sys.stderr) + + try: + self._end = self.commandline().optionString("end") + except RuntimeError: + print("No end time given, considering 'now': {}".format(self._end), + file=sys.stderr) + + try: + self._parameter = self.commandline().optionString("parameter") + except RuntimeError: + print("No QC parameter given, using default", file=sys.stderr) + + try: + self._formatted = self.commandline().hasOption("formatted") + except RuntimeError: + pass + + return True + + def run(self): + if not self.query(): + print("No database connection!\n", file=sys.stderr) + return False + + streams = self._streams + if not streams and self._fromInventory: + try: + streams = getStreamsFromInventory(self) + except RuntimeError: + print("No streams read from database!\n", file=sys.stderr) + return False + + if not streams: + print("Empty stream list") + return False + + for stream in streams: + if re.search("[*?]", stream): + print("Wildcards in streamID are not supported: {}\n" + .format(stream), file=sys.stderr) + return False + + print("Request:", file=sys.stderr) + print(" streams: {}".format(str(streams)), file=sys.stderr) + print(" number of streams: {}".format(len(streams)), file=sys.stderr) + print(" begin time: {}".format(str(self._start)), file=sys.stderr) + print(" end time: {}".format(str(self._end)), file=sys.stderr) + print(" parameters: {}".format(str(self._parameter)), + file=sys.stderr) + print("Output:", file=sys.stderr) + print(" file: {}".format(self._outfile), file=sys.stderr) + print(" formatted XML: {}".format(self._formatted), file=sys.stderr) + + # create archive + xarc = seiscomp.io.XMLArchive() + if not xarc.create(self._outfile, True, True): + print("Unable to write XML to {}!\n".format(self._outfile), + file=sys.stderr) + return False + xarc.setFormattedOutput(self._formatted) + qc = seiscomp.datamodel.QualityControl() + + # write parameters + for 
parameter in self._parameter.split(","): + for stream in streams: + (net, sta, loc, cha) = stream.split(".") + it = self.query().getWaveformQuality(seiscomp.datamodel.WaveformStreamID(net, sta, loc, cha, ""), + parameter, + seiscomp.core.Time.FromString( + self._start, "%Y-%m-%d %H:%M:%S"), + seiscomp.core.Time.FromString(self._end, "%Y-%m-%d %H:%M:%S")) + + while it.get(): + try: + wfq = seiscomp.datamodel.WaveformQuality.Cast(it.get()) + qc.add(wfq) + except Exception: + pass + it.step() + + xarc.writeObject(qc) + xarc.close() + return True + + +app = WfqQuery(len(sys.argv), sys.argv) +sys.exit(app()) diff --git a/bin/screloc b/bin/screloc new file mode 100755 index 0000000..cdef1e0 Binary files /dev/null and b/bin/screloc differ diff --git a/bin/scrttv b/bin/scrttv new file mode 100755 index 0000000..e445b41 Binary files /dev/null and b/bin/scrttv differ diff --git a/bin/scsendjournal b/bin/scsendjournal new file mode 100755 index 0000000..61b5a00 --- /dev/null +++ b/bin/scsendjournal @@ -0,0 +1,83 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. # +############################################################################ + +import sys +import seiscomp.core +import seiscomp.client +import seiscomp.datamodel + + +class SendJournal(seiscomp.client.Application): + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + self.setDatabaseEnabled(False, False) + self.setMessagingEnabled(True) + self.setMessagingUsername("") + self.setPrimaryMessagingGroup("EVENT") + + def init(self): + if not seiscomp.client.Application.init(self): + return False + self.params = self.commandline().unrecognizedOptions() + if len(self.params) < 2: + sys.stderr.write( + self.name() + " [opts] {objectID} {action} [parameters]\n") + return False + return True + + def printUsage(self): + + print('''Usage: + scsendjournal [options] + +Send journaling information to the messaging to manipulate event parameters''') + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +Set the type of the event with ID gempa2021abcd to 'earthquake' + scsendjournal -H localhost gempa2021abcd EvType "earthquake" +''') + + def run(self): + msg = seiscomp.datamodel.NotifierMessage() + + entry = seiscomp.datamodel.JournalEntry() + entry.setCreated(seiscomp.core.Time.GMT()) + entry.setObjectID(self.params[0]) + entry.setSender(self.author()) + entry.setAction(self.params[1]) + + sys.stderr.write( + "Sending entry (" + entry.objectID() + "," + entry.action() + ")\n") + + if len(self.params) > 2: + entry.setParameters(self.params[2]) + + n = seiscomp.datamodel.Notifier( + seiscomp.datamodel.Journaling.ClassName(), seiscomp.datamodel.OP_ADD, entry) + msg.attach(n) + self.connection().send(msg) + + return True + + +def main(argc, argv): + app = SendJournal(argc, argv) + return app() + + +if __name__ == "__main__": + sys.exit(main(len(sys.argv), sys.argv)) diff --git a/bin/scsendorigin b/bin/scsendorigin new file 
mode 100755 index 0000000..ee92953 --- /dev/null +++ b/bin/scsendorigin @@ -0,0 +1,94 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. # +############################################################################ + +import sys +import seiscomp.core +import seiscomp.datamodel +import seiscomp.client +import seiscomp.logging + + +class SendOrigin(seiscomp.client.Application): + + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + self.setDatabaseEnabled(False, False) + self.setMessagingEnabled(True) + self.setPrimaryMessagingGroup("GUI") + + def init(self): + if not seiscomp.client.Application.init(self): + return False + + try: + cstr = self.commandline().optionString("coord") + tstr = self.commandline().optionString("time") + except: + sys.stderr.write( + "Must specify origin using '--coord lat,lon,dep --time time'\n") + return False + + self.origin = seiscomp.datamodel.Origin.Create() + + ci = seiscomp.datamodel.CreationInfo() + ci.setAgencyID(self.agencyID()) + ci.setCreationTime(seiscomp.core.Time.GMT()) + self.origin.setCreationInfo(ci) + + lat, lon, dep = list(map(float, cstr.split(","))) + self.origin.setLongitude(seiscomp.datamodel.RealQuantity(lon)) + self.origin.setLatitude(seiscomp.datamodel.RealQuantity(lat)) + self.origin.setDepth(seiscomp.datamodel.RealQuantity(dep)) + + time = seiscomp.core.Time() + time.fromString(tstr.replace("/", "-") + ":0:0", "%F %T") + self.origin.setTime(seiscomp.datamodel.TimeQuantity(time)) + + return True + + def createCommandLineDescription(self): + try: + self.commandline().addGroup("Parameters") + self.commandline().addStringOption("Parameters", + "coord", + "Latitude,longitude,depth of origin") + self.commandline().addStringOption("Parameters", + "time", "time of origin") + except: + seiscomp.logging.warning("caught unexpected error %s" % sys.exc_info()) + + def printUsage(self): + print('''Usage: + scsendorigin [options] + +Create an artificial origin and send to the messaging''') + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +Send an artificial origin with hypocenter parameters to the messaging + scsendorigin --time "2022-05-01 10:00:00" --coord 52,12,10 +''') + + def run(self): + msg = seiscomp.datamodel.ArtificialOriginMessage(self.origin) + self.connection().send(msg) + return True + + +app = SendOrigin(len(sys.argv), sys.argv) +# app.setName("scsendorigin") +app.setMessagingUsername("scsendorg") +sys.exit(app()) diff --git a/bin/scshowevent b/bin/scshowevent new file mode 100755 index 0000000..6c10088 Binary files /dev/null and b/bin/scshowevent differ diff --git a/bin/scsohlog b/bin/scsohlog new file mode 100755 index 0000000..e90c3d2 --- /dev/null +++ b/bin/scsohlog @@ -0,0 +1,395 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. 
# +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. # +############################################################################ + +import sys, os, re +import seiscomp.core, seiscomp.client, seiscomp.logging, seiscomp.system + + +""" +Monitor application that connects to the messaging and collects all +information on the STATUS_GROUP to create an XML file ever N seconds. +It can furthermore call a configured script to trigger processing of the +produced XML file. +""" + +inputRegEx = re.compile("in\((?P[^\)]*)\)") +outputRegEx = re.compile("out\((?P[^\)]*)\)") + + +# Define all units of measure for available system SOH tags. Tags that are +# not given here are not processed. +Tests = { + "cpuusage": "%", + "clientmemoryusage": "kB", + "sentmessages": "cnt", + "receivedmessages": "cnt", + "messagequeuesize": "cnt", + "objectcount": "cnt", + "uptime": "s", + "dbadds": "row/s", + "dbupdates": "row/s", + "dbdeletes": "row/s" +} + + +#---------------------------------------------------------------------------- +# Class TestLog to hold the properties of a test. It also creates XML. +#---------------------------------------------------------------------------- +class TestLog: + def __init__(self): + self.value = None + self.uom = None + self.update = None + + def toXML(self, f, name): + f.write('= 1E-6: + f.write(' value="%f"' % fvalue) + else: + f.write(' value="%d"' % int(fvalue)) + except: + f.write(' value="%s"' % self.value) + if self.uom: + f.write(' uom="%s"' % self.uom) + if self.update: + f.write(' updateTime="%s"' % self.update) + f.write('/>') + + +#---------------------------------------------------------------------------- +# Class ObjectLog to hold the properties of a object log. It also creates +# XML. +#---------------------------------------------------------------------------- +class ObjectLog: + def __init__(self): + self.count = None + self.average = None + self.timeWindow = None + self.last = None + self.update = None + + def toXML(self, f, name, channel): + f.write('') + + +#---------------------------------------------------------------------------- +# Class Client that holds all tests and object logs of a particular client +# (messaging user name). +#---------------------------------------------------------------------------- +class Client: + def __init__(self): + self.pid = None + self.progname = None + self.host = None + + self.inputLogs = dict() + self.outputLogs = dict() + self.tests = dict() + + #---------------------------------------------------------------------------- + # Update/add (system) tests based on the passed tests dictionary retrieved + # from a status message. 
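+    # Apart from the bookkeeping keys 'pid', 'programname' and 'hostname',
+    # only names listed in the module-level 'Tests' map are stored; 'uptime'
+    # values given as d:h:m:s are converted to seconds first.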
+ #---------------------------------------------------------------------------- + def updateTests(self, updateTime, tests): + for name, value in list(tests.items()): + if name == "pid": + self.pid = value + elif name == "programname": + self.progname = value + elif name == "hostname": + self.host = value + + if name not in Tests: + continue + + # Convert d:h:m:s to seconds + if name == "uptime": + try: + t = [int(v) for v in value.split(":")] + except: + continue + if len(t) != 4: + continue + value = str(t[0]*86400+t[1]*3600+t[2]*60+t[3]) + + if name not in self.tests: + log = TestLog() + log.uom = Tests[name] + self.tests[name] = log + else: + log = self.tests[name] + log.value = value + log.update = updateTime + + #---------------------------------------------------------------------------- + # Update/add object logs based on the passed log text. The content is parsed. + #---------------------------------------------------------------------------- + def updateObjects(self, updateTime, log): + # Check input structure + v = inputRegEx.search(log) + if not v: + # Check out structure + v = outputRegEx.search(log) + if not v: + return + logs = self.outputLogs + else: + logs = self.inputLogs + + try: + tmp = v.group('params').split(',') + except: + return + + params = dict() + for p in tmp: + try: + param, value = p.split(':', 1) + except: + continue + params[param] = value + + name = params.get("name", "") + channel = params.get("chan", "") + if (name, channel) not in logs: + logObj = ObjectLog() + logs[(name, channel)] = logObj + else: + logObj = logs[(name, channel)] + + logObj.update = updateTime + logObj.count = params.get("cnt") + logObj.average = params.get("avg") + logObj.timeWindow = params.get("tw") + logObj.last = params.get("last") + + def toXML(self, f, name): + f.write('') + for name, log in list(self.tests.items()): + log.toXML(f, name) + if len(self.inputLogs) > 0: + f.write('') + for id, log in list(self.inputLogs.items()): + log.toXML(f, id[0], id[1]) + f.write('') + if len(self.outputLogs) > 0: + f.write('') + for id, log in list(self.outputLogs.items()): + log.toXML(f, id[0], id[1]) + f.write('') + f.write("") + + +#---------------------------------------------------------------------------- +# SC3 application class Monitor +#---------------------------------------------------------------------------- +class Monitor(seiscomp.client.Application): + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + self.setDatabaseEnabled(False, False) + self.setMembershipMessagesEnabled(True); + self.addMessagingSubscription(seiscomp.client.Protocol.STATUS_GROUP) + self.setMessagingUsername("") + self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP) + self._clients = dict() + self._outputScript = None + self._outputFile = "@LOGDIR@/server.xml" + self._outputInterval = 60 + + def createCommandLineDescription(self): + try: + self.commandline().addGroup("Output") + self.commandline().addStringOption("Output", "file,o", + "Specify the output file to create") + self.commandline().addIntOption("Output", "interval,i", + "Specify the output interval in seconds (default: 60)") + self.commandline().addStringOption("Output", "script", + "Specify an output script to be called after the output file is generated") + except: + seiscomp.logging.warning( + "caught unexpected error %s" % sys.exc_info()) + return True + + def initConfiguration(self): + if not seiscomp.client.Application.initConfiguration(self): + return False + + try: + 
self._outputFile = self.configGetString("monitor.output.file") + except: + pass + + try: + self._outputInterval = self.configGetInt("monitor.output.interval") + except: + pass + + try: + self._outputScript = self.configGetString("monitor.output.script") + except: + pass + + return True + + def init(self): + if not seiscomp.client.Application.init(self): + return False + + try: + self._outputFile = self.commandline().optionString("file") + except: + pass + + try: + self._outputInterval = self.commandline().optionInt("interval") + except: + pass + + try: + self._outputScript = self.commandline().optionString("script") + except: + pass + + self._outputFile = seiscomp.system.Environment.Instance().absolutePath(self._outputFile) + seiscomp.logging.info("Output file: %s" % self._outputFile) + + if self._outputScript: + self._outputScript = seiscomp.system.Environment.Instance().absolutePath(self._outputScript) + seiscomp.logging.info("Output script: %s" % self._outputScript) + + self._monitor = self.addInputObjectLog("status", seiscomp.client.Protocol.STATUS_GROUP) + self.enableTimer(self._outputInterval) + seiscomp.logging.info("Starting output timer with %d secs" % self._outputInterval) + + return True + + def printUsage(self): + print('''Usage: + scsohlog [options] + +Connect to the messaging collecting information sent from connected clients''') + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +Create an output XML file every 60 seconds and execute a custom script to process the XML file + scsohlog -o stat.xml -i 60 --script process-stat.sh +''') + def handleNetworkMessage(self, msg): + # A state of health message + if msg.type == seiscomp.client.Packet.Status: + data = filter(None, msg.payload.split("&")) + self.updateStatus(msg.subject, data) + + # If a client disconnected, remove it from the list + elif msg.type == seiscomp.client.Packet.Disconnected: + if msg.subject in self._clients: + del self._clients[msg.subject] + + def handleDisconnect(self): + # If we got disconnected all client states are deleted + self._clients = dict() + + #---------------------------------------------------------------------------- + # Timeout handler called by the Application class. + # Write XML to configured output file and trigger configured script. 
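+    # If the output file is configured as '-', the XML is written to stdout
+    # and no output script is called.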
+ #---------------------------------------------------------------------------- + def handleTimeout(self): + if self._outputFile == "-": + self.toXML(sys.stdout) + sys.stdout.write("\n") + return + + try: + f = open(self._outputFile, "w") + except: + seiscomp.logging.error( + "Unable to create output file: %s" % self._outputFile) + return + + self.toXML(f) + f.close() + + if self._outputScript: + os.system(self._outputScript + " " + self._outputFile) + + #---------------------------------------------------------------------------- + # Write XML to stream f + #---------------------------------------------------------------------------- + def toXML(self, f): + f.write('') + f.write('' % self.messagingURL()) + for name, client in list(self._clients.items()): + client.toXML(f, name) + f.write('') + + def updateStatus(self, name, items): + if name not in self._clients: + self._clients[name] = Client() + + now = seiscomp.core.Time.GMT() + client = self._clients[name] + self.logObject(self._monitor, now) + + params = dict() + objs = [] + + for t in items: + try: + param, value = t.split("=", 1) + params[param] = value + except: + objs.append(t) + + if "time" in params: + update = params["time"] + del params["time"] + else: + update = now.iso() + + client.updateTests(update, params) + for o in objs: + client.updateObjects(update, o) + #client.toXML(sys.stdout, name) + + +app = Monitor(len(sys.argv), sys.argv) +sys.exit(app()) + diff --git a/bin/scvoice b/bin/scvoice new file mode 100755 index 0000000..491a686 --- /dev/null +++ b/bin/scvoice @@ -0,0 +1,502 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
# +############################################################################ + +import sys +import subprocess +import traceback + +from seiscomp import (client, core, datamodel, logging, seismology, system, + math) + +class VoiceAlert(client.Application): + + def __init__(self, argc, argv): + client.Application.__init__(self, argc, argv) + + self.setMessagingEnabled(True) + self.setDatabaseEnabled(True, True) + self.setLoadRegionsEnabled(True) + self.setMessagingUsername("") + self.setPrimaryMessagingGroup(client.Protocol.LISTENER_GROUP) + self.addMessagingSubscription("EVENT") + self.addMessagingSubscription("LOCATION") + self.addMessagingSubscription("MAGNITUDE") + + self.setAutoApplyNotifierEnabled(True) + self.setInterpretNotifierEnabled(True) + + self.setLoadCitiesEnabled(True) + self.setLoadRegionsEnabled(True) + + self._ampType = "snr" + self._citiesMaxDist = 20 + self._citiesMinPopulation = 50000 + + self._cache = None + self._eventDescriptionPattern = None + self._ampScript = None + self._alertScript = None + self._eventScript = None + + self._ampProc = None + self._alertProc = None + self._eventProc = None + + self._newWhenFirstSeen = False + self._prevMessage = {} + self._agencyIDs = [] + + def createCommandLineDescription(self): + self.commandline().addOption( + "Generic", "first-new", "calls an event a new event when it is " + "seen the first time") + self.commandline().addGroup("Alert") + self.commandline().addStringOption( + "Alert", "amp-type", "specify the amplitude type to listen to", + self._ampType) + self.commandline().addStringOption( + "Alert", "amp-script", "specify the script to be called when a " + "stationamplitude arrived, network-, stationcode and amplitude are " + "passed as parameters $1, $2 and $3") + self.commandline().addStringOption( + "Alert", "alert-script", "specify the script to be called when a " + "preliminary origin arrived, latitude and longitude are passed as " + "parameters $1 and $2") + self.commandline().addStringOption( + "Alert", "event-script", "specify the script to be called when an " + "event has been declared; the message string, a flag (1=new event, " + "0=update event), the EventID, the arrival count and the magnitude " + "(optional when set) are passed as parameter $1, $2, $3, $4 and $5") + self.commandline().addGroup("Cities") + self.commandline().addStringOption( + "Cities", "max-dist", "maximum distance for using the distance " + "from a city to the earthquake") + self.commandline().addStringOption( + "Cities", "min-population", "minimum population for a city to " + "become a point of interest") + self.commandline().addGroup("Debug") + self.commandline().addStringOption( + "Debug", "eventid,E", "specify Event ID") + return True + + def init(self): + if not client.Application.init(self): + return False + + try: + self._newWhenFirstSeen = self.configGetBool("firstNew") + except BaseException: + pass + + try: + agencyIDs = self.configGetStrings("agencyIDs") + for item in agencyIDs: + item = item.strip() + if item not in self._agencyIDs: + self._agencyIDs.append(item) + except BaseException: + pass + + try: + if self.commandline().hasOption("first-new"): + self._newWhenFirstSeen = True + except BaseException: + pass + + try: + self._eventDescriptionPattern = self.configGetString("poi.message") + except BaseException: + pass + + try: + self._citiesMaxDist = self.configGetDouble("poi.maxDist") + except BaseException: + pass + + try: + self._citiesMaxDist = self.commandline().optionDouble("max-dist") + except BaseException: + pass + + 
try: + self._citiesMinPopulation = self.configGetInt("poi.minPopulation") + except BaseException: + pass + + try: + self._citiesMinPopulation = self.commandline().optionInt("min-population") + except BaseException: + pass + + try: + self._ampType = self.commandline().optionString("amp-type") + except BaseException: + pass + + try: + self._ampScript = self.commandline().optionString("amp-script") + except BaseException: + try: + self._ampScript = self.configGetString("scripts.amplitude") + except BaseException: + logging.warning("No amplitude script defined") + + if self._ampScript: + self._ampScript = system.Environment.Instance().absolutePath(self._ampScript) + + try: + self._alertScript = self.commandline().optionString("alert-script") + except BaseException: + try: + self._alertScript = self.configGetString("scripts.alert") + except BaseException: + logging.warning("No alert script defined") + + if self._alertScript: + self._alertScript = system.Environment.Instance( + ).absolutePath(self._alertScript) + + try: + self._eventScript = self.commandline().optionString("event-script") + except BaseException: + try: + self._eventScript = self.configGetString("scripts.event") + logging.info( + "Using event script: %s" % self._eventScript) + except BaseException: + logging.warning("No event script defined") + + if self._eventScript: + self._eventScript = system.Environment.Instance() \ + .absolutePath(self._eventScript) + + logging.info("Creating ringbuffer for 100 objects") + if not self.query(): + logging.warning( + "No valid database interface to read from") + self._cache = datamodel.PublicObjectRingBuffer( + self.query(), 100) + + if self._ampScript and self.connection(): + self.connection().subscribe("AMPLITUDE") + + if self._newWhenFirstSeen: + logging.info( + "A new event is declared when I see it the first time") + + if not self._agencyIDs: + logging.info("agencyIDs: []") + else: + logging.info( + "agencyIDs: %s" % (" ".join(self._agencyIDs))) + + return True + + def printUsage(self): + + print('''Usage: + scvoice [options] + +Alert the user acoustically in real time. 
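+
+Scripts given by --amp-script, --alert-script and --event-script are executed
+when station amplitudes, preliminary origins or event updates are received.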
+''') + + client.Application.printUsage(self) + + print('''Examples: +Execute scvoice on command line with debug output + scvoice --debug +''') + + def run(self): + try: + try: + eventID = self.commandline().optionString("eventid") + event = self._cache.get(datamodel.Event, eventID) + if event: + self.notifyEvent(event) + except BaseException: + pass + + return client.Application.run(self) + except BaseException: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + return False + + def runAmpScript(self, net, sta, amp): + if not self._ampScript: + return + + if self._ampProc is not None: + if self._ampProc.poll() is None: + logging.warning( + "AmplitudeScript still in progress -> skipping message") + return + try: + self._ampProc = subprocess.Popen( + [self._ampScript, net, sta, "%.2f" % amp]) + logging.info( + "Started amplitude script with pid %d" % self._ampProc.pid) + except BaseException: + logging.error( + "Failed to start amplitude script '%s'" % self._ampScript) + + def runAlert(self, lat, lon): + if not self._alertScript: + return + + if self._alertProc is not None: + if self._alertProc.poll() is None: + logging.warning( + "AlertScript still in progress -> skipping message") + return + try: + self._alertProc = subprocess.Popen( + [self._alertScript, "%.1f" % lat, "%.1f" % lon]) + logging.info( + "Started alert script with pid %d" % self._alertProc.pid) + except BaseException: + logging.error( + "Failed to start alert script '%s'" % self._alertScript) + + def handleMessage(self, msg): + try: + dm = core.DataMessage.Cast(msg) + if dm: + for att in dm: + org = datamodel.Origin.Cast(att) + if not org: + continue + + try: + if org.evaluationStatus() == datamodel.PRELIMINARY: + self.runAlert(org.latitude().value(), + org.longitude().value()) + except BaseException: + pass + + #ao = datamodel.ArtificialOriginMessage.Cast(msg) + # if ao: + # org = ao.origin() + # if org: + # self.runAlert(org.latitude().value(), org.longitude().value()) + # return + + client.Application.handleMessage(self, msg) + except BaseException: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + + def addObject(self, parentID, arg0): + #pylint: disable=W0622 + try: + obj = datamodel.Amplitude.Cast(arg0) + if obj: + if obj.type() == self._ampType: + logging.debug("got new %s amplitude '%s'" % ( + self._ampType, obj.publicID())) + self.notifyAmplitude(obj) + + obj = datamodel.Origin.Cast(arg0) + if obj: + self._cache.feed(obj) + logging.debug("got new origin '%s'" % obj.publicID()) + + try: + if obj.evaluationStatus() == datamodel.PRELIMINARY: + self.runAlert(obj.latitude().value(), + obj.longitude().value()) + except BaseException: + pass + + return + + obj = datamodel.Magnitude.Cast(arg0) + if obj: + self._cache.feed(obj) + logging.debug( + "got new magnitude '%s'" % obj.publicID()) + return + + obj = datamodel.Event.Cast(arg0) + if obj: + org = self._cache.get( + datamodel.Origin, obj.preferredOriginID()) + agencyID = org.creationInfo().agencyID() + logging.debug("got new event '%s'" % obj.publicID()) + if not self._agencyIDs or agencyID in self._agencyIDs: + self.notifyEvent(obj, True) + except BaseException: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + + def updateObject(self, parentID, arg0): + try: + obj = datamodel.Event.Cast(arg0) + if obj: + org = self._cache.get(datamodel.Origin, obj.preferredOriginID()) + agencyID = org.creationInfo().agencyID() + logging.debug("update event 
'%s'" % obj.publicID()) + if not self._agencyIDs or agencyID in self._agencyIDs: + self.notifyEvent(obj, False) + except BaseException: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + + def notifyAmplitude(self, amp): + self.runAmpScript(amp.waveformID().networkCode(), + amp.waveformID().stationCode(), + amp.amplitude().value()) + + def notifyEvent(self, evt, newEvent=True): + try: + org = self._cache.get(datamodel.Origin, evt.preferredOriginID()) + if not org: + logging.warning("unable to get origin %s, ignoring event " + "message" % evt.preferredOriginID()) + return + + preliminary = False + try: + if org.evaluationStatus() == datamodel.PRELIMINARY: + preliminary = True + except BaseException: + pass + + if not preliminary: + nmag = self._cache.get( + datamodel.Magnitude, evt.preferredMagnitudeID()) + if nmag: + mag = nmag.magnitude().value() + mag = "magnitude %.1f" % mag + else: + if len(evt.preferredMagnitudeID()) > 0: + logging.warning( + "unable to get magnitude %s, ignoring event " + "message" % evt.preferredMagnitudeID()) + else: + logging.warning( + "no preferred magnitude yet, ignoring event message") + return + + # keep track of old events + if self._newWhenFirstSeen: + if evt.publicID() in self._prevMessage: + newEvent = False + else: + newEvent = True + + dsc = seismology.Regions.getRegionName( + org.latitude().value(), org.longitude().value()) + + if self._eventDescriptionPattern: + try: + city, dist, _ = self.nearestCity( + org.latitude().value(), org.longitude().value(), + self._citiesMaxDist, self._citiesMinPopulation) + if city: + dsc = self._eventDescriptionPattern + region = seismology.Regions.getRegionName( + org.latitude().value(), org.longitude().value()) + distStr = str(int(math.deg2km(dist))) + dsc = dsc.replace("@region@", region).replace( + "@dist@", distStr).replace("@poi@", city.name()) + except BaseException: + pass + + logging.debug("desc: %s" % dsc) + + dep = org.depth().value() + now = core.Time.GMT() + otm = org.time().value() + + dt = (now - otm).seconds() + + # if dt > dtmax: + # return + + if dt > 3600: + dt = "%d hours %d minutes ago" % (dt/3600, (dt % 3600)/60) + elif dt > 120: + dt = "%d minutes ago" % (dt/60) + else: + dt = "%d seconds ago" % dt + + if preliminary: + message = "earthquake, preliminary, %%s, %s" % dsc + else: + message = "earthquake, %%s, %s, %s, depth %d kilometers" % ( + dsc, mag, int(dep+0.5)) + # at this point the message lacks the "ago" part + + if evt.publicID() in self._prevMessage and \ + self._prevMessage[evt.publicID()] == message: + logging.info("Suppressing repeated message '%s'" % message) + return + + self._prevMessage[evt.publicID()] = message + message = message % dt # fill the "ago" part + logging.info(message) + + if not self._eventScript: + return + + if self._eventProc is not None: + if self._eventProc.poll() is None: + logging.warning( + "EventScript still in progress -> skipping message") + return + + try: + param2 = 0 + param3 = 0 + param4 = "" + if newEvent: + param2 = 1 + + org = self._cache.get( + datamodel.Origin, evt.preferredOriginID()) + if org: + try: + param3 = org.quality().associatedPhaseCount() + except BaseException: + pass + + nmag = self._cache.get( + datamodel.Magnitude, evt.preferredMagnitudeID()) + if nmag: + param4 = "%.1f" % nmag.magnitude().value() + + self._eventProc = subprocess.Popen( + [self._eventScript, message, "%d" % param2, evt.publicID(), + "%d" % param3, param4]) + logging.info( + "Started event script with pid %d" % 
self._eventProc.pid) + except BaseException: + logging.error( + "Failed to start event script '%s %s %d %d %s'" % ( + self._eventScript, message, param2, param3, param4)) + except BaseException: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + + +app = VoiceAlert(len(sys.argv), sys.argv) +sys.exit(app()) diff --git a/bin/scwfas b/bin/scwfas new file mode 100755 index 0000000..4ecd714 Binary files /dev/null and b/bin/scwfas differ diff --git a/bin/scwfparam b/bin/scwfparam new file mode 100755 index 0000000..a0ea77a Binary files /dev/null and b/bin/scwfparam differ diff --git a/bin/scxmldump b/bin/scxmldump new file mode 100755 index 0000000..921fa45 Binary files /dev/null and b/bin/scxmldump differ diff --git a/bin/scxmlmerge b/bin/scxmlmerge new file mode 100755 index 0000000..ad1938a Binary files /dev/null and b/bin/scxmlmerge differ diff --git a/bin/sczip b/bin/sczip new file mode 100755 index 0000000..97c009b Binary files /dev/null and b/bin/sczip differ diff --git a/bin/seiscomp b/bin/seiscomp new file mode 100755 index 0000000..4e0fd7f --- /dev/null +++ b/bin/seiscomp @@ -0,0 +1,55 @@ +#!/bin/sh -e + +# Resolve softlink to seiscomp executable first +if test -L "$0" +then + # $0 is a link + target="$(readlink "$0")" + case "$target" in + /*) + d="$target" + ;; + *) + d="$(dirname "$0")/$target" + ;; + esac +else + # $0 is NOT a link + case "$0" in + */* | /*) + d="$0" + ;; + *) + d="$(command -v "$0")" + ;; + esac +fi + +normalized_dirname() { + # Normalize directory name without following symlinks. + # Brute-force but portable. + cd "${1%/*}" && pwd || exit 1 +} + +# Determine the root directory of the 'seiscomp' utility. +d="$(normalized_dirname "$d")" +SEISCOMP_ROOT="$(realpath "${d%/bin}")" + +export SEISCOMP_ROOT +export PATH="$SEISCOMP_ROOT/bin:$PATH" +export LD_LIBRARY_PATH="$SEISCOMP_ROOT/lib:$LD_LIBRARY_PATH" +export PYTHONPATH="$SEISCOMP_ROOT/lib/python:$PYTHONPATH" +export MANPATH="$SEISCOMP_ROOT/share/man:$MANPATH" + +HOSTENV=$SEISCOMP_ROOT/etc/env/by-hostname/$(hostname) +test -f $HOSTENV && . $HOSTENV + +case $1 in + exec) + shift + exec "$@" + ;; + *) + exec $SEISCOMP_ROOT/bin/seiscomp-python "$SEISCOMP_ROOT/bin/seiscomp-control.py" "$@" + ;; +esac diff --git a/bin/seiscomp-control.py b/bin/seiscomp-control.py new file mode 100755 index 0000000..98eac0a --- /dev/null +++ b/bin/seiscomp-control.py @@ -0,0 +1,1534 @@ +#!/usr/bin/env seiscomp-python + +from __future__ import division, print_function + +import glob +import importlib +import math +import os +import platform +import shutil +import signal +import socket +import subprocess +import sys +import traceback +import seiscomp.shell + +# Problem: if +# import seiscomp.config +# fails, then in any case a sometimes misleading exception +# ImportError: No module named _config +# is raised, even if the seiscomp._config module exists but for +# another reason fails to import. We therefore... +import seiscomp._config +# ...here explicitly to get a meaningful exception if this fails. + +import seiscomp.config +import seiscomp.kernel + +# Python version depended string conversion +if sys.version_info[0] < 3: + py3ustr = str +else: + py3ustr = lambda s: s.decode('utf-8', 'replace') + +try: + real_raw_input = raw_input +except NameError: + real_raw_input = input + + +# request and optionally enforce user input +# @param question The question to be answered. +# @param default The default value to use if no input was made +# @param options List or string of available options. 
If defined the input must +# match one of the options (unless a default is specified). If the input +# is invalid the question is repeated. +def getInput(question, default=None, options=None): + + def _default(text): + # print default value to previous line if no input was made + if default: + print("\033[F\033[{}G{}".format(len(text)+1, default)) + return default + + # no options: accept any type of input + if not options: + defaultStr = "" if default is None else " [{}]".format(default) + question = "{}{}: ".format(question, defaultStr) + return real_raw_input(question) or _default(question) + + if default is not None: + default = str(default).lower() + + # options supplied: check and enforce input + opts = [str(o).lower() for o in options] + optStr = "/".join(o.upper() if o == default else o for o in opts) + question = "{} [{}]: ".format(question, optStr) + while True: + res = real_raw_input(question) + if not res and default: + return _default(question) + + if res.lower() in opts: + return res.lower() + + +if sys.platform == "darwin": + SysLibraryPathVar = "DYLD_FALLBACK_LIBRARY_PATH" + SysFrameworkPathVar = "DYLD_FALLBACK_FRAMEWORK_PATH" +else: + SysLibraryPathVar = "LD_LIBRARY_PATH" + SysFrameworkPathVar = None + + +def get_library_path(): + if sys.platform == "darwin": + return LD_LIBRARY_PATH + ":" + DYLD_FALLBACK_FRAMEWORK_PATH + + return LD_LIBRARY_PATH + + +def get_framework_path(): + return DYLD_FALLBACK_FRAMEWORK_PATH + + +# Python 3 compatible string check +def is_string(variable): + try: + string_class = basestring + except NameError: + string_class = str + + return isinstance(variable, string_class) + + +# ------------------------------------------------------------------------------ +# Helper functions +# ------------------------------------------------------------------------------ +SIGTERM_SENT = False + + +def sigterm_handler(_signum, _): + # pylint: disable=W0603 + global SIGTERM_SENT + if not SIGTERM_SENT: + SIGTERM_SENT = True + os.killpg(0, signal.SIGTERM) + + sys.exit() + + +def system(args): + proc = subprocess.Popen(args, shell=False, env=os.environ) + while True: + try: + return proc.wait() + except KeyboardInterrupt: + continue + except Exception as e: + try: + proc.terminate() + except Exception: + pass + sys.stderr.write("Exception: %s\n" % str(e)) + continue + + # return subprocess.call(cmd, shell=True) + + +def error(msg): + sys.stderr.write("error: %s\n" % msg) + sys.stderr.flush() + + +def warning(msg): + sys.stderr.write("warning: %s\n" % msg) + sys.stderr.flush() + + +# Returns a seiscomp.kernel.Module instance +# from a given path with a given name +def load_module(path): + modname0 = os.path.splitext(os.path.basename(path))[0].replace('.', '_') + modname = '__seiscomp_modules_' + modname0 + + if modname in sys.modules: + mod = sys.modules[modname] + else: + if sys.path[0] != INIT_PATH: + sys.path.insert(0, INIT_PATH) + mod = importlib.import_module(modname0) + mod.__file__ = path + + # store it in sys.modules + sys.modules[modname] = mod + + module = mod.Module + + return module + + +def module_key(module): + return (module.order, module.name) + + +def load_init_modules(path): + modules = [] + + if not os.path.exists(path): + error("Cannot load any module - path not existing: %s" % path) + return modules + + files = glob.glob(os.path.join(path, "*.py")) + for f in files: + try: + pmod = load_module(f) + except Exception as exc: + error(("%s: " % f) + str(exc)) + continue + + try: + mod = pmod(env) # .Module(env) + except Exception as exc: + 
error(("%s: " % f) + str(exc)) + continue + + modules.append(mod) + + #mods = sorted(mods, key=lambda mod: mod.order) + modules = sorted(modules, key=module_key) + + return modules + + +def get_module(name): + for module in mods: + if module.name == name: + return module + return None + + +def has_module(name): + return get_module(name) is not None + + +def dump_paths(): + print('--------------------') + print('SEISCOMP_ROOT="%s"' % SEISCOMP_ROOT) + print('PATH="%s"' % os.environ["PATH"]) + print('%s="%s"' % (SysLibraryPathVar, os.environ[SysLibraryPathVar])) + if SysFrameworkPathVar: + print( + '%s="%s"' % + (SysFrameworkPathVar, + os.environ[SysFrameworkPathVar])) + print('PYTHONPATH="%s"' % sys.path) + print('CWD="%s"' % os.getcwd()) + print('--------------------') + + +# Returns whether a module should run or not. It simply returns if its +# runfile exists. +def shouldModuleRun(mod_name): + return os.path.exists(env.runFile(mod_name)) + + +def touch(filename): + try: + open(filename, 'w').close() + except Exception as exc: + error(str(exc)) + + +def start_module(mod): + # Create runfile + touch(env.runFile(mod.name)) + return mod.start() + + +def stop_module(mod): + try: + if not mod.stop(): + error("Failed to stop %s: unknown error" % mod.name) + return 1 + except Exception as e: + error("Failed to stop %s: %s" % (mod.name, str(e))) + return 1 + + # Delete runfile + try: + os.remove(env.runFile(mod.name)) + except BaseException: + return 1 + + return 0 + + +def start_kernel_modules(): + for mod in mods: + if isinstance(mod, seiscomp.kernel.CoreModule): + return start_module(mod) + + return 1 + + +def stop_kernel_modules(): + for mod in reversed(mods): + if isinstance(mod, seiscomp.kernel.CoreModule): + return stop_module(mod) + + return 1 + +def detectOS(): + OSReleaseMap = { + 'centos': 'rhel', + 'rocky': 'rhel', + 'raspbian': 'debian' + } + + try: + arch = platform.machine() + except BaseException: + arch = 'x86_64' + + data = {} + with open('/etc/os-release', 'r') as f: + for line in f: + toks = line.split("=") + if len(toks) != 2: + continue + + data[toks[0].strip().upper()] = toks[1].strip() + + osID = OSReleaseMap.get(data['ID'].strip('"')) + if not osID: + osID = data['ID'].strip('"') + + version = data['VERSION_ID'].strip('"') + if osID == 'rhel': + try: + version = str(math.floor(float(version))) + except Exception: + pass + + name = data['NAME'].strip('"') + return name, osID, version, arch + +# ------------------------------------------------------------------------------ +# Commandline action handler +# ------------------------------------------------------------------------------ +def on_setup(args, flags): + # pylint: disable=W0621 + import seiscomp.setup + + if "stdin" in flags: + cfg = seiscomp.config.Config() + if not cfg.readConfig("-"): + error("invalid configuration from stdin") + return 1 + else: + setup = seiscomp.setup.Simple() + cfg = setup.run(env) + + retCode = 0 + + for mod in config_mods: + if len(args) == 0 or mod.name in args: + try: + hasSetupHandler = callable(getattr(mod, 'setup')) + except BaseException: + hasSetupHandler = False + + if hasSetupHandler: + print("* setup %s" % mod.name) + if mod.setup(cfg) != 0: + error("module '%s' failed to setup" % mod.name) + retCode = 1 + + if retCode == 0: + runpath = os.path.join(SEISCOMP_ROOT, "var", "run") + if not os.path.exists(runpath): + try: + os.makedirs(runpath) + except BaseException: + error("failed to create directory: %s" % runpath) + + statfile = os.path.join(runpath, "seiscomp.init") + if not 
os.path.exists(statfile): + try: + open(statfile, "w").close() + except BaseException: + error("failed to create status file: %s" % statfile) + + return retCode + + +def on_setup_help(_): + print("Initialize the configuration of all available modules. Each module") + print("implements its own setup handler which is called at this point. The") + print("initialization takes the installation directory into account and") + print("should be repeated when copying the system to another directory.") + print("NOTE:") + print("Setup might overwrite already made settings with default values.") + return 0 + + +def on_shell(_args, _): + shell = seiscomp.shell.CLI() + try: + shell.run(env) + except Exception as e: + error(str(e)) + return 1 + return 0 + + +def on_shell_help(_): + print("Launches the SeisComP shell, a commandline interface which allows") + print("to manage modules configurations and bindings.") + return 0 + + +def on_enable(args, _): + if not args: + error("module name required") + return 1 + + for name in args: + modName = get_module(name) + if modName is None: + error("%s is not available" % name) + elif isinstance(modName, seiscomp.kernel.CoreModule): + error("%s is a kernel module and is enabled automatically" % name) + else: + env.enableModule(name) + return 0 + + +def on_enable_help(_): + print("Enables all given modules to be started when 'seiscomp start' is") + print("invoked without a module list.") + print() + print("Examples:") + print("seiscomp enable seedlink slarchive") + + +def on_disable(args, _): + if not args: + error("module name required") + return 1 + + for name in args: + modName = get_module(name) + if modName is None: + error("%s is not available" % modName) + elif isinstance(modName, seiscomp.kernel.CoreModule): + error("%s is a kernel module and cannot be disabled" % name) + else: + env.disableModule(name) + return 0 + + +def on_disable_help(_): + print("Disables all given modules. See 'enable'.") + print() + print("Examples:") + print("seiscomp disable seedlink slarchive") + + +def on_start(args, _): + cntStarted = 0 + if not args: + if start_kernel_modules() == 0: + cntStarted += 1 + for mod in mods: + # Kernel modules have been started already + if isinstance(mod, seiscomp.kernel.CoreModule): + continue + # Module in autorun? 
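+            # i.e. was it enabled with 'seiscomp enable <module>'?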
+ if env.isModuleEnabled(mod.name): + if start_module(mod) == 0: + cntStarted += 1 + else: + for mod in mods: + if mod.name in args or len(args) == 0: + if start_module(mod) == 0: + cntStarted += 1 + + if not useCSV: + print("Summary: {} modules started".format(cntStarted)) + + return 0 + + +def on_start_help(_): + print("Starts all enabled modules or a list of modules given.") + print() + print("Examples:") + print("seiscomp start") + print("seiscomp start seedlink slarchive") + + +def on_stop(args, _): + cntStopped = 0 + if not args: + for mod in reversed(mods): + # Kernel modules will be stopped latter + if isinstance(mod, seiscomp.kernel.CoreModule): + continue + if stop_module(mod) == 0: + cntStopped += 1 + + # Stop all kernel modules + if stop_kernel_modules() == 0: + cntStopped += 1 + else: + for mod in reversed(mods): + if mod.name in args or len(args) == 0: + if stop_module(mod) == 0: + cntStopped += 1 + + if not useCSV: + print("Summary: {} modules stopped".format(cntStopped)) + + return 0 + + +def on_stop_help(_): + print("Stops all enabled modules or a list of modules given.") + print() + print("Examples:") + print("seiscomp stop") + print("seiscomp stop seedlink slarchive") + + +def on_restart(args, flags): + on_stop(args, flags) + on_start(args, flags) + return 0 + + +def on_restart_help(_): + print("Restarts all enabled modules or a list of modules given.") + print("This command is equal to:") + print("seiscomp stop {args}") + print("seiscomp start {args}") + print() + print("Examples:") + print("seiscomp restart") + print("seiscomp restart seedlink slarchive") + + +def on_reload(args, _): + if not args: + for mod in mods: + # Reload not supported by kernel modules + if isinstance(mod, seiscomp.kernel.CoreModule): + continue + + if shouldModuleRun(mod.name): + mod.reload() + else: + for mod in mods: + if mod.name in args or len(args) == 0: + mod.reload() + + return 0 + + +def on_reload_help(_): + print("Reloads all enabled modules or a list of modules given.") + print("This operation is module specific and implemented only for some") + print("modules.") + print() + print("Examples:") + print("seiscomp reload") + print("seiscomp reload fdsnws") + + +def on_check(args, _): + cntStarted = 0 + for mod in mods: + if mod.name in args or len(args) == 0: + if shouldModuleRun(mod.name): + cntStarted += 1 + mod.check() + + if not useCSV: + print("Summary: {} started modules checked".format(cntStarted)) + + return 0 + + +def on_check_help(_): + print("Checks if a started module is still running. If not, it is") + print("restarted. 
If no modules are given, all started modules are") + print("checked.") + print() + print("Examples:") + print("$ seiscomp check seedlink") + print("seedlink is already running") + + +def on_exec(args, _): + if len(args) < 1: + error("no module name given") + return False + + # Change back into the working dir + env.chback() + return system(args) + + +def on_exec_help(_): + print("Executes a command like calling a command from commandline.") + print("It will setup all paths and execute the command.") + print("'seiscomp run' will block until the command terminates.") + print("Example:") + print("seiscomp exec scolv") + + +def on_list(args, _): + if len(args) < 1: + error("expected argument: {modules|aliases|enabled|disabled|started}") + return 1 + + if args[0] == "modules": + found = 0 + for mod in mods: + if env.isModuleEnabled(mod.name) or \ + isinstance(mod, seiscomp.kernel.CoreModule): + state = "enabled" + else: + state = "disabled" + found += 1 + print("%s is %s" % (mod.name, state)) + + if not useCSV: + print("Summary: {} modules reported".format(found)) + + return 0 + + if args[0] == "aliases": + f = open(ALIAS_FILE, 'r') + lines = [line.rstrip() for line in f.readlines()] + for line in lines: + if line.lstrip().startswith('#') or not line.strip(): + continue + toks = [t.strip() for t in line.split('=')] + # Remove invalid lines + if len(toks) != 2: + continue + if useCSV: + print("%s;%s" % (toks[0], toks[1])) + else: + print("%s -> %s" % (toks[0], toks[1])) + f.close() + return 0 + + if args[0] == "enabled": + found = 0 + for mod in mods: + if env.isModuleEnabled(mod.name) or \ + isinstance(mod, seiscomp.kernel.CoreModule): + print(mod.name) + found += 1 + + if not useCSV: + print("Summary: {} modules enabled".format(found)) + + return 0 + + if args[0] == "disabled": + found = 0 + for mod in mods: + if not env.isModuleEnabled(mod.name) and \ + not isinstance(mod, seiscomp.kernel.CoreModule): + print(mod.name) + found += 1 + + if not useCSV: + print("Summary: {} modules disabled".format(found)) + + return 0 + + if args[0] == "started": + found = 0 + for mod in mods: + if shouldModuleRun(mod.name): + print(mod.name) + found += 1 + + if not useCSV: + print("Summary: {} modules started".format(found)) + + return 0 + + error( + "wrong argument: {modules|aliases|enabled|disabled|started} expected") + return 1 + + +def on_list_help(_): + print("Prints the result of a query. 
5 queries are currently supported:") + print(" modules: lists all existing modules") + print(" aliases: lists all existing aliases") + print(" enabled: lists all enabled modules") + print(" disabled: lists all disabled modules") + print(" started: lists all started modules") + print() + print("Examples:") + print("$ seiscomp list aliases") + print("l1autopick -> scautopick") + + +def on_status(args, _): + found = 0 + if len(args) > 0 and args[0] == "enabled": + for mod in mods: + if env.isModuleEnabled(mod.name) or isinstance( + mod, seiscomp.kernel.CoreModule): + mod.status(shouldModuleRun(mod.name)) + found += 1 + + if not useCSV: + print("Summary: {} modules enabled".format(found)) + + return 0 + + if len(args) > 0 and args[0] == "started": + for mod in mods: + if shouldModuleRun(mod.name): + mod.status(shouldModuleRun(mod.name)) + found += 1 + + if not useCSV: + print("Summary: {} modules started".format(found)) + + return 0 + + for mod in mods: + if mod.name in args or len(args) == 0: + mod.status(shouldModuleRun(mod.name)) + found += 1 + + if not useCSV: + print("Summary: {} modules reported".format(found)) + return 0 + + +def on_status_help(_): + print("Prints the status of ") + print(" * all modules") + print(" * all enabled modules") + print(" * all started modules") + print(" * a list of modules") + print("and gives a warning if a module should run but doesn't.") + print("This command supports csv formatted output via '--csv' switch.") + print() + print("Examples:") + print("$ seiscomp status started") + print("$ seiscomp status enabled") + print("scmaster is not running [WARNING]") + print("$ seiscomp status scautopick") + print("scautopick is not running") + print("$ seiscomp --csv status scautopick") + print("scautopick;0;0;0") + print() + print("CSV format:") + print(" column 1: module name") + print(" column 2: running flag") + print(" column 3: should run flag") + print(" column 4: enabled flag") + + +def on_print(args, _): + if len(args) < 1: + error("expected argument: {crontab|env}") + return 1 + + if args[0] == "crontab": + print("*/3 * * * * %s check >/dev/null 2>&1" % + os.path.join(env.SEISCOMP_ROOT, "bin", "seiscomp")) + for mod in mods: + mod.printCrontab() + elif args[0] == "env": + print('export SEISCOMP_ROOT="%s"' % SEISCOMP_ROOT) + print('export PATH="%s:$PATH"' % BIN_PATH) + print('export %s="%s:$%s"' % + (SysLibraryPathVar, get_library_path(), SysLibraryPathVar)) + if sys.platform == "darwin": + print( + 'export %s="%s:$%s"' % + (SysFrameworkPathVar, + get_framework_path(), + SysFrameworkPathVar)) + + print('export PYTHONPATH="%s:$PYTHONPATH"' % PYTHONPATH) + print('export MANPATH="%s:$MANPATH"' % MANPATH) + print( + 'source "%s/share/shell-completion/seiscomp.bash"' % + SEISCOMP_ROOT) + hostenv = os.path.join(SEISCOMP_ROOT, "etc", "env", "by-hostname", + socket.gethostname()) + if os.path.isfile(hostenv): + print('source %s' % hostenv) + else: + error("wrong argument: {crontab|env} expected") + return 1 + + return 0 + + +def on_print_help(_): + print("seiscomp print {crontab|env}") + print(" crontab: prints crontab entries of all registered or given modules.") + print(" env: prints environment variables necessary to run SeisComP modules.") + print() + print("Examples:") + print("Source SC environment into current bash session") + print("$ eval $(seiscomp/bin/seiscomp print env)") + + +def on_install_deps_linux(args, _): + + try: + name, release, version, arch = detectOS() + except BaseException as err: + 
print("*********************************************************************") + print("seiscomp was not able to figure out the installed distribution") + print("You need to check the documentation for required packages and install") + print("them manually.") + print("Error: {}".format(err)) + print("*********************************************************************") + + return 1 + + print("Distribution: {}-{}-{}({}-{})".format(name, version, arch, release, version)) + + for n in range(version.count('.') + 1): + ver = version.rsplit('.', n)[0] + script_dir = os.path.join( + env.SEISCOMP_ROOT, "share", "deps", release.lower(), ver.lower()) + if os.path.exists(script_dir): + break + + if not os.path.exists(script_dir): + print("*********************************************************************") + print("Sorry, the installed distribution is not supported.") + print("You need to check the documentation for required packages and install") + print("them manually.") + print("*********************************************************************") + return 1 + + for pkg in args: + script = os.path.join(script_dir, "install-" + pkg + ".sh") + if not os.path.exists(script): + error("no handler available for package '%s'" % pkg) + return 1 + + if system(["sudo", "sh", script]) != 0: + error("installation failed") + return 1 + + return 0 + + +def on_install_deps(args, flags): + if not args: + error("expected package list: PKG1 [PKG2 [..]]") + print("Example: seiscomp install-deps base gui mysql-server") + print("For a list of available packages issue: seiscomp help install-deps") + + if sys.platform.startswith("linux"): + return on_install_deps_linux(args, flags) + + error("unsupported platform") + print("*********************************************************************") + print("The platform you are currently running on is not supported to install") + print("dependencies automatically.") + print("You need to check the documentation for required packages and install") + print("them manually.") + print("*********************************************************************") + return 1 + + +def on_install_deps_help(_): + print("seiscomp install-deps PKG1 [PKG2 [..]]") + print("Installs OS dependencies to run SeisComP. This requires either a 'sudo'") + print("or root account. Available packages are:") + print(" base: basic packages required by all installations") + print(" gui: required by graphical user interfaces, e.g. 
on workstations") + print(" [mysql,mariadb,postgresql]-server:") + print(" database management system required by the machine running") + print(" the SeisComP messaging system (scmaster)") + print(" fdsnws: required for data sharing via the FDSN web services") + + return 0 + + +def on_update_config(args, _): + kernelModsStarted = False + configuredMods = {} + + listOfMods = args + if not listOfMods: + listOfMods = [] + for mod in config_mods: + listOfMods.append(mod.name) + + while len(listOfMods) > 0: + for mod in config_mods: + if mod.name in listOfMods: + if not kernelModsStarted and mod.requiresKernelModules(): + print("* starting kernel modules") + start_kernel_modules() + kernelModsStarted = True + print("* configure %s" % mod.name) + + proxy = None + + try: + proxy = mod.updateConfigProxy() + if is_string(proxy): + configuredMods.setdefault(proxy, False) + except Exception: + pass + + if proxy is None: + result = mod.updateConfig() + + try: + error_code = int(result) + except ValueError: + error("unexpected return type when updating " + "configuration of %s" % mod.name) + return 1 + + if error_code != 0: + error( + "updating configuration for %s failed" % mod.name) + return 1 + + configuredMods[mod.name] = True + + listOfMods = [] + # Collect all unconfigured but indirectly requested mods + for name, configured in configuredMods.items(): + if not configured: + listOfMods.append(name) + + return 0 + + +def on_update_config_help(_): + print("Updates the configuration of all available modules. This command") + print("will convert the etc/*.cfg to the modules native configuration") + print("including its bindings.") + return 0 + + +def on_alias(args, _): + if len(args) < 2: + error("expected arguments: {create|remove} ALIAS_NAME APP_NAME") + return 1 + + aliasName = args[1] + + if args[0] == "create": + if len(args) != 3: + error("expected two arguments for create: ALIAS_NAME APP_NAME") + return 1 + + mod = None + for module in mods: + if module.name == args[2]: + mod = module + break + + if not mod: + error("module '%s' not found" % args[2]) + return 1 + + supportsAliases = False + try: + supportsAliases = mod.supportsAliases() + except BaseException: + pass + + if not supportsAliases: + error("module '%s' does not support aliases" % args[2]) + return 1 + + mod2 = args[2] + if os.path.exists(os.path.join("bin", mod2)): + mod1 = os.path.join("bin", aliasName) + elif os.path.exists(os.path.join("sbin", mod2)): + mod1 = os.path.join("sbin", aliasName) + else: + error("no %s binary found (neither bin nor sbin)") + return 1 + + # create alias line in etc/descriptions/aliases + if not os.path.exists(DESC_PATH): + try: + os.makedirs(DESC_PATH) + except Exception: + error("failed to create directory: %s" % DESC_PATH) + return 1 + + has_alias = False + lines = [] + new_lines = [] + try: + f = open(ALIAS_FILE, 'r') + lines = [line.rstrip() for line in f.readlines()] + for line in lines: + if line.lstrip().startswith('#') or not line.strip(): + # Keep comments or empty lines + new_lines.append(line) + continue + toks = [t.strip() for t in line.split('=')] + # Remove invalid lines + if len(toks) != 2: + continue + if toks[0] == aliasName: + has_alias = True + break + + new_lines.append(line) + f.close() + except BaseException: + pass + + if has_alias: + warning("%s is already registered as alias for %s in " \ + "$SEISCOMP_ROOT/etc/descriptions/aliases" % (aliasName, toks[1])) + warning(" + do not register again but trying to link the required files") + else: + print( + "Registered alias '%s' in 
$SEISCOMP_ROOT/etc/descriptions/aliases" % + (aliasName)) + + # Check if target exists already + if os.path.exists(os.path.join(SEISCOMP_ROOT, mod1)): + warning( + "link '%s' to '%s' exists already in %s/bin/" % + (aliasName, mod2, SEISCOMP_ROOT)) + warning(" + do not link again") + + try: + f = open(ALIAS_FILE, 'w') + except BaseException: + error("failed to open/create alias file: %s" % ALIAS_FILE) + return 1 + + new_lines.append("%s = %s" % (aliasName, args[2])) + + f.write("\n".join(new_lines) + "\n") + f.close() + + # create symlink of defaults from etc/defaults/mod1.cfg to etc/defaults/mod2.cfg + # use relative path to default_cfg2 + cwdAlias = os.getcwd() + os.chdir(os.path.join(SEISCOMP_ROOT, "etc", "defaults")) + default_cfg1 = aliasName + ".cfg" + default_cfg2 = args[2] + ".cfg" + if os.path.exists(default_cfg2): + print("Linking default configuration: %s -> %s" % + (default_cfg2, default_cfg1)) + # - first: remove target + try: + os.remove(default_cfg1) + except BaseException: + pass + # create symlink + os.symlink(os.path.relpath(default_cfg2), default_cfg1) + else: + print("No default configuration to link") + # return to initial directory + os.chdir(cwdAlias) + + # create symlink from bin/mod1 to bin/mod2 + # - first: remove target + try: + os.remove(os.path.join(SEISCOMP_ROOT, mod1)) + except BaseException: + pass + print("Creating app symlink: %s -> %s" % (mod2, mod1)) + os.symlink(mod2, os.path.join(SEISCOMP_ROOT, mod1)) + + # create symlink from etc/init/mod1.py to etc/init/mod2.py + cwdAlias = os.getcwd() + os.chdir(os.path.join(SEISCOMP_ROOT, "etc", "init")) + init1 = aliasName + ".py" + init2 = args[2] + ".py" + print("Linking init script: %s -> %s" % (init2, init1)) + # - first: remove target + try: + os.remove(init1) + except BaseException: + pass + # create symlink with relative path + os.symlink(os.path.relpath(init2), init1) + # return to initial directory + os.chdir(cwdAlias) + + return 0 + + if args[0] == "remove": + if len(args) != 2: + error("expected one argument for remove: alias-name") + return 1 + + print("Removing alias '%s'" % aliasName) + # check and remove alias line in etc/descriptions/aliases + has_alias = False + lines = [] + new_lines = [] + try: + f = open(ALIAS_FILE, 'r') + lines = [line.rstrip() for line in f.readlines()] + for line in lines: + if line.lstrip().startswith('#') or not line.strip(): + # Keep comments or empty lines + new_lines.append(line) + continue + toks = [t.strip() for t in line.split('=')] + # Remove invalid lines + if len(toks) != 2: + continue + if toks[0] == aliasName: + has_alias = True + else: + new_lines.append(line) + f.close() + except BaseException: + pass + + if not has_alias: + print(" + {} is not defined as an alias".format(aliasName)) + if not interactiveMode: + print(" + remove related configuration with '--interactive'") + if len(lines) == len(new_lines): + return 1 + + try: + f = open(ALIAS_FILE, 'w') + except BaseException: + error(" + failed to open/create alias file: %s" % ALIAS_FILE) + return 1 + + if len(lines) > 0: + f.write("\n".join(new_lines) + "\n") + f.close() + + if not has_alias: + if not interactiveMode: + return 1 + + # remove symlink from bin/mod1 + if os.path.exists(os.path.join("bin", aliasName)): + sym_link = os.path.join("bin", aliasName) + elif os.path.exists(os.path.join("sbin", aliasName)): + sym_link = os.path.join("sbin", aliasName) + else: + sym_link = "" + + if sym_link: + print(" + removing app symlink: %s" % sym_link) + try: + os.remove(os.path.join(SEISCOMP_ROOT, sym_link)) + 
except BaseException: + pass + + # remove symlink from etc/init/mod1.py + init_scr = os.path.join("etc", "init", aliasName + ".py") + print(" + removing init script: %s" % init_scr) + try: + os.remove(os.path.join(SEISCOMP_ROOT, init_scr)) + except BaseException: + pass + + # delete defaults etc/defaults/mod1.cfg + default_cfg = os.path.join("etc", "defaults", aliasName + ".cfg") + print(" + removing default configuration: {}/{}" + .format(SEISCOMP_ROOT, default_cfg)) + try: + os.remove(os.path.join(SEISCOMP_ROOT, default_cfg)) + except BaseException as e: + error(" + could not remove %s" % e) + + if not interactiveMode: + warning("No other configuration removed for '%s' - interactive" + " removal is supported by '--interactive'" % aliasName) + return 0 + + # test module configuration files + # SYSTEMCONFIGDIR + cfg = os.path.join("etc", aliasName + ".cfg") + if os.path.isfile(cfg): + print(" + found module configuration file: {}/{}" + .format(SEISCOMP_ROOT, cfg)) + answer = getInput(" + do you wish to remove it?", 'n', 'yn') + if answer == "y": + try: + os.remove(cfg) + except Exception as e: + error(" + could not remove '%s' - try manually" % e) + + # CONFIGDIR + cfg = os.path.join( + os.path.expanduser("~"), + ".seiscomp", + aliasName + ".cfg") + if os.path.isfile(cfg): + print(" + found module configuration file: {}".format(cfg)) + answer = getInput(" + do you wish to remove it?", 'n', 'yn') + if answer == "y": + try: + os.remove(cfg) + except Exception as e: + error(" + could not remove the file: %s - try manually" % e) + + # test module binding files + bindingDir = os.path.join(SEISCOMP_ROOT, "etc", "key", aliasName) + if os.path.exists(bindingDir): + print(" + found binding directory: {}".format(bindingDir)) + answer = getInput(" + do you wish to remove it?", 'n', 'yn') + if answer == "y": + try: + shutil.rmtree(bindingDir) + except Exception as e: + error(" + could not remove the directory: %s - try manually" % e) + + # test key files + keyDir = os.path.join(SEISCOMP_ROOT, 'etc', 'key') + dirContent = os.listdir(keyDir) + keyFiles = [] + print(" + testing key files") + for f in dirContent: + if not os.path.isfile(os.path.join(keyDir, f)) or \ + not f.startswith("station_"): + continue + + keyFile = os.path.join(keyDir, f) + with open(keyFile, 'r') as fp: + # Read all lines in the file one by one + for line in fp: + # check if the line starts with the module name + if line.startswith(aliasName): + keyFiles.append(keyFile) + print(" + found binding for '{}' in: {}".format(aliasName, keyFile)) + + if keyFiles: + print(" + found {} bindings for '{}' in key files".format(len(keyFiles), aliasName)) + question = " + remove all '{}' bindings from key files?".format(aliasName) + answer = getInput(question, 'n', 'yn') + if answer == "y": + shell = seiscomp.shell.CLI(env) + shell.commandRemove(["module", aliasName, "*.*"]) + else: + print(" + found no key files") + + return 0 + + error("Wrong command '%s': expected 'create' or 'remove'" % args[0]) + return 1 + + +def on_alias_help(_): + print("seiscomp alias {create|remove} ALIAS_NAME APP_NAME") + print("Creates/removes symlinks to applications. 
Symlinks to symlinks are not allowed.") + print() + print("Examples:") + print("$ seiscomp alias create scautopick2 scautopick") + print("Copy default configuration: etc/defaults/scautopick.cfg -> etc/defaults/scautopick2.cfg") + print("Create app symlink: scautopick -> bin/scautopick2") + print("Copy init script: etc/init/scautopick.py -> etc/init/scautopick2.py") + print() + print("$ seiscomp alias remove scautopick2") + print("Remove default configuration: etc/defaults/scautopick2.cfg") + print("Remove app symlink: bin/scautopick2") + print("Remove init script: etc/init/scautopick2.py") + + +allowed_actions = [ + "install-deps", + "setup", + "shell", + "enable", + "disable", + "start", + "stop", + "restart", + "reload", + "check", + "status", + "list", + "exec", + "update-config", + "alias", + "print", + "help" +] + + +# Define all actions that do not need locking of seiscomp +actions_without_lock = [ + # "install-deps", + "help", + "list", + "exec", + "print" +] + + +def on_help(args, _): + if not args: + print("Name:") + print(" seiscomp - Load the environment of the SeisComP installation from " \ + "where seiscomp is executed and run a command") + print("\nSynopsis:") + print(" seiscomp [flags] [commands] [arguments]") + print("\nFlags:") + print(" --asroot Allow running a command as root") + print(" --csv Print output as csv in machine-readable format") + print(" -i, [--interactive] Interactive mode: Allow deleting files " \ + "interactively when removing aliases") + print(" --wait arg Define a timeout in seconds for acquiring the seiscomp " \ + "lock file, e.g. `seiscomp --wait 10 update-config`") + print("\nAvailable commands:") + for helpAction in allowed_actions: + print(" %s" % helpAction) + + print("\nUse 'help [command]' to get more help about a command") + print("\nExamples:") + print(" seiscomp help update-config Show help for update-config") + print(" seiscomp update-config Run update-config for allmodules") + print(" seiscomp update-config trunk Run update-config for all trunk modules") + print(" seiscomp update-config scautopick Run update-config for scautopick") + return 0 + + cmd = args[0] + try: + func = globals()["on_" + cmd.replace("-", "_") + "_help"] + except BaseException: + print("Sorry, no help available for %s" % cmd) + return 1 + func(args[1:]) + return 0 + + +def run_action(runAction, args, flags): + try: + func = globals()["on_" + runAction.replace("-", "_")] + return func(args, flags) + except Exception as exc: + error("command '%s' failed: %s" % (runAction, str(exc))) + if "debug" in flags: + info = traceback.format_exception(*sys.exc_info()) + for i in info: + sys.stderr.write(i) + return 2 + + +def on_csv_help(_): + print("If --csv is prepended to a usual command the internal output is") + print("set to comma separated values. 
The only command that currently")
+    print("uses this output format is 'status'.")
+    print()
+    print("Example:")
+    print("seiscomp --csv status")
+    return 0
+
+
+# ------------------------------------------------------------------------------
+# Check command line
+# ------------------------------------------------------------------------------
+useCSV = False
+asRoot = False
+lockTimeout = None
+interactiveMode = False
+
+argv = sys.argv[1:]
+argflags = []
+
+# Check for flags
+while argv:
+    if argv[0] == "--csv":
+        useCSV = True
+        argv = argv[1:]
+    elif argv[0] == "--asroot":
+        asRoot = True
+        argv = argv[1:]
+    elif argv[0] == "--interactive" or argv[0] == "-i":
+        interactiveMode = True
+        argv = argv[1:]
+    elif argv[0] == "--wait":
+        argv = argv[1:]
+        if not argv:
+            print("--wait expects an integer value in seconds")
+            sys.exit(1)
+        try:
+            lockTimeout = int(argv[0])
+        except BaseException:
+            print("Wait timeout is not an integer: %s" % argv[0])
+            sys.exit(1)
+        if lockTimeout < 0:
+            print("Wait timeout must be positive: %s" % argv[0])
+            sys.exit(1)
+        argv = argv[1:]
+    elif argv[0].startswith("--"):
+        argflags.append(argv[0][2:])
+        argv = argv[1:]
+    else:
+        break
+
+if len(argv) < 1:
+    print("seiscomp [flags] {%s} [args]" % "|".join(allowed_actions))
+    print("\nUse 'seiscomp help' to get more help")
+    sys.exit(1)
+
+action = argv[0]
+arguments = argv[1:]
+
+if action not in allowed_actions:
+    print("seiscomp [flags] {%s} [args]" % "|".join(allowed_actions))
+    sys.exit(1)
+
+if os.getuid() == 0 and not asRoot and action != "install-deps":
+    print("Running 'seiscomp' as root is dangerous. Use --asroot only if you")
+    print("know exactly what you are doing!")
+    sys.exit(1)
+
+# ------------------------------------------------------------------------------
+# Initialize the environment
+# ------------------------------------------------------------------------------
+
+# Resolve symlinks to files (if any)
+if os.path.islink(sys.argv[0]):
+    # Read the link target
+    target = os.readlink(sys.argv[0])
+    # If the target is an absolute path then take it as is
+    if os.path.isabs(target):
+        sys.argv[0] = target
+    # Otherwise join the dirname of the script with the target
+    # to get the semi-real path of the seiscomp script. Semi-real
+    # refers to the fact that symlinks are not completely resolved
+    # and why the usage of os.path.realpath is avoided. If the
+    # seiscomp directory itself is a symlink it should be preserved.
+ else: + sys.argv[0] = os.path.join(os.path.dirname(sys.argv[0]), target) + +# Guess SEISCOMP_ROOT from path of called script, directory links are not +# resolved allowing to create separate SeisComP environments +if os.path.isabs(sys.argv[0]): + root_path = sys.argv[0] +else: + cwd = os.getenv('PWD') + if cwd is None: + cwd = os.getcwd() + root_path = os.path.join(cwd, sys.argv[0]) + +SEISCOMP_ROOT = os.path.dirname(os.path.dirname(os.path.normpath(root_path))) +INIT_PATH = os.path.join(SEISCOMP_ROOT, "etc", "init") +DESC_PATH = os.path.join(SEISCOMP_ROOT, "etc", "descriptions") +ALIAS_FILE = os.path.join(DESC_PATH, "aliases") +BIN_PATH = os.path.join(SEISCOMP_ROOT, "bin") +SBIN_PATH = os.path.join(SEISCOMP_ROOT, "sbin") +PYTHONPATH = os.path.join(SEISCOMP_ROOT, "lib", "python") +MANPATH = os.path.join(SEISCOMP_ROOT, "share", "man") +LD_LIBRARY_PATH = os.path.join(SEISCOMP_ROOT, "lib") +DYLD_FALLBACK_FRAMEWORK_PATH = os.path.join(SEISCOMP_ROOT, "lib", "3rd-party") + +# Run another process with proper LD_LIBRARY_PATH set otherwise the dynamic +# linker will not find dependent SC3 libraries +isWrapped = False +try: + if os.environ["SEISCOMP_WRAP"] == "TRUE": + isWrapped = True +except BaseException: + pass + + +# Setup signal handler +#signal.signal(signal.SIGTERM, sigterm_handler) + +if not isWrapped: + try: + os.environ["PATH"] = BIN_PATH + ":" + os.environ["PATH"] + except BaseException: + os.environ["PATH"] = BIN_PATH + + try: + os.environ[SysLibraryPathVar] = get_library_path() + ":" + \ + os.environ[SysLibraryPathVar] + except BaseException: + os.environ[SysLibraryPathVar] = get_library_path() + + if sys.platform == "darwin": + os.environ[SysFrameworkPathVar] = get_framework_path() + + try: + os.environ["PYTHONPATH"] = PYTHONPATH + ":" + os.environ["PYTHONPATH"] + except BaseException: + os.environ["PYTHONPATH"] = PYTHONPATH + try: + os.environ["MANPATH"] = MANPATH + ":" + os.environ["MANPATH"] + except BaseException: + os.environ["MANPATH"] = MANPATH + + os.environ["SEISCOMP_WRAP"] = "TRUE" + + sys.exit(system(sys.argv)) + +# Register local lib/python in SEARCH PATH +sys.path.insert(0, PYTHONPATH) + +# Create environment which supports queries for various SeisComP +# directoris and sets PATH, LD_LIBRARY_PATH and PYTHONPATH +env = seiscomp.kernel.Environment(SEISCOMP_ROOT) +env.setCSVOutput(useCSV) + +# Check for lock file +isChild = False + +if action in actions_without_lock: + isChild = True +else: + try: + isChild = os.environ["SEISCOMP_LOCK"] == "TRUE" + except KeyError: + pass + +if not isChild: + if not env.tryLock("seiscomp", lockTimeout): + error("Could not get lock %s - is another process using it?" % + env.lockFile("seiscomp")) + sys.exit(1) + + os.environ["SEISCOMP_LOCK"] = "TRUE" + exitcode = system( + ["run_with_lock", "-q", env.lockFile("seiscomp")] + sys.argv) + sys.exit(exitcode) + + +# Change into SEISCOMP_ROOT directory. The env instance will change +# back into the current working directory automatically if destroyed. 
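Two process tricks happen above before any real work is done. First, the script re-executes itself once with SEISCOMP_WRAP=TRUE after prepending BIN_PATH, the library path (LD_LIBRARY_PATH on Linux), PYTHONPATH and MANPATH, because the dynamic linker only honours such variables in a freshly started process. Second, for commands that need the 'seiscomp' lock, it re-runs itself under run_with_lock with SEISCOMP_LOCK=TRUE. Only then does it change into $SEISCOMP_ROOT with the env.chroot() call right below. A generic sketch of the set-environment-and-re-exec-once pattern, using hypothetical names (MYAPP_WRAP, run_wrapped) rather than the script's own:

# Sketch of the wrap-and-re-exec pattern; illustrative, not the actual code.
import os
import subprocess
import sys

def run_wrapped(extra_paths):
    """Re-run the current script exactly once with prepended path variables."""
    if os.environ.get("MYAPP_WRAP") == "TRUE":   # hypothetical guard variable
        return                                   # second pass: continue normally
    env = dict(os.environ)
    for var, path in extra_paths.items():
        env[var] = path + (":" + env[var] if env.get(var) else "")
    env["MYAPP_WRAP"] = "TRUE"
    # The child starts with the new environment, so its dynamic linker sees
    # the updated library path; exit with the child's return code.
    sys.exit(subprocess.call([sys.executable] + sys.argv, env=env))

run_wrapped({"LD_LIBRARY_PATH": "/opt/myapp/lib",
             "PYTHONPATH": "/opt/myapp/lib/python"})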
+env.chroot() + +simpleCommand = (action == "install-deps") or \ + (action == "print" and arguments == "env") + +if not simpleCommand: + config_mods = load_init_modules(INIT_PATH) + mods = [] + for m in config_mods: + if m.isConfigModule: + continue + mods.append(m) + +sys.exit(run_action(action, arguments, argflags)) diff --git a/bin/seiscomp-python b/bin/seiscomp-python new file mode 100755 index 0000000..45b803e --- /dev/null +++ b/bin/seiscomp-python @@ -0,0 +1,19 @@ +#!/bin/sh +# +# This is a shell script that executes the Python interpreter as +# configured using cmake. +# +# In order to use this in your Python programs use this +# shebang line: + +#!/usr/bin/env seiscomp-python + +# Please note that this wrapper does *not* set the environment +# variables for you. To ensure that you run your script in the +# proper environment, please use 'seiscomp exec'. Alternatively +# you can also set your environment variables according to the +# output of 'seiscomp print env'. + +python_executable="/usr/bin/python3" + +exec $python_executable "$@" diff --git a/bin/sh2proc b/bin/sh2proc new file mode 100755 index 0000000..891eb78 --- /dev/null +++ b/bin/sh2proc @@ -0,0 +1,884 @@ +#!/usr/bin/env seiscomp-python +# -*- coding: utf-8 -*- +############################################################################ +# Copyright (C) GFZ Potsdam # +# All rights reserved. # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. 
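As the comments in bin/seiscomp-python above point out, that wrapper only selects the interpreter configured at build time; it does not export PATH, LD_LIBRARY_PATH or PYTHONPATH. A tiny hypothetical user script showing the intended usage, launched for example with 'seiscomp exec ./showenv.py' or after 'eval $(seiscomp print env)':

#!/usr/bin/env seiscomp-python
# Hypothetical example script (showenv.py); illustration only.
import os
import sys

print("interpreter  :", sys.executable)
print("SEISCOMP_ROOT:", os.environ.get(
    "SEISCOMP_ROOT", "<not set - run via 'seiscomp exec' or 'seiscomp print env'>"))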
# +# # +# Author: Alexander Jaeger, Stephan Herrnkind, # +# Lukas Lehmann, Dirk Roessler# # +# Email: herrnkind@gempa.de # +############################################################################ + + +import seiscomp.client, seiscomp.core, seiscomp.datamodel, seiscomp.io, seiscomp.logging, seiscomp.math +from time import strptime +import sys +import traceback + +TimeFormats = [ + '%d-%b-%Y_%H:%M:%S.%f', + '%d-%b-%Y_%H:%M:%S' +] + + +# SC3 has more event types available in the datamodel +EventTypes = { + 'teleseismic quake': seiscomp.datamodel.EARTHQUAKE, + 'local quake': seiscomp.datamodel.EARTHQUAKE, + 'regional quake': seiscomp.datamodel.EARTHQUAKE, + 'quarry blast': seiscomp.datamodel.QUARRY_BLAST, + 'nuclear explosion': seiscomp.datamodel.NUCLEAR_EXPLOSION, + 'mining event': seiscomp.datamodel.MINING_EXPLOSION +} + + +def wfs2Str(wfsID): + return '%s.%s.%s.%s' % (wfsID.networkCode(), wfsID.stationCode(), + wfsID.locationCode(), wfsID.channelCode()) + + +############################################################################### +class SH2Proc(seiscomp.client.Application): + + ########################################################################### + def __init__(self): + seiscomp.client.Application.__init__(self, len(sys.argv), sys.argv) + self.setMessagingEnabled(True) + self.setDatabaseEnabled(True, True) + self.setLoadInventoryEnabled(True) + self.setLoadConfigModuleEnabled(True) + self.setDaemonEnabled(False) + + self.inputFile = '-' + + ########################################################################### + def initConfiguration(self): + if not seiscomp.client.Application.initConfiguration(self): + return False + + # If the database connection is passed via command line or configuration + # file then messaging is disabled. Messaging is only used to get + # the configured database connection URI. + if self.databaseURI() != '': + self.setMessagingEnabled(False) + else: + # A database connection is not required if the inventory is loaded + # from file + if not self.isInventoryDatabaseEnabled(): + self.setMessagingEnabled(False) + self.setDatabaseEnabled(False, False) + + return True + + ########################################################################## + def printUsage(self): + + print('''Usage: + sh2proc [options] + +Convert Seismic Handler event data to SeisComP XML format''') + + seiscomp.client.Application.printUsage(self) + + print('''Examples: +Convert the Seismic Handler file shm.evt to SCML. Receive the database +connection to read inventory and configuration information from messaging + sh2proc shm.evt + +Read Seismic Handler data from stdin. 
Provide inventory and configuration in XML + cat shm.evt | sh2proc --inventory-db=inventory.xml --config-db=config.xml +''') + + ########################################################################## + def validateParameters(self): + if not seiscomp.client.Application.validateParameters(self): + return False + + for opt in self.commandline().unrecognizedOptions(): + if len(opt) > 1 and opt.startswith('-'): + continue + + self.inputFile = opt + break + + return True + + ########################################################################### + def loadStreams(self): + now = seiscomp.core.Time.GMT() + inv = seiscomp.client.Inventory.Instance() + + self.streams = {} + + # try to load streams by detecLocid and detecStream + mod = self.configModule() + if mod is not None and mod.configStationCount() > 0: + seiscomp.logging.info('loading streams using detecLocid and detecStream') + for i in range(mod.configStationCount()): + cfg = mod.configStation(i) + net = cfg.networkCode() + sta = cfg.stationCode() + if sta in self.streams: + seiscomp.logging.warning( + 'ambiguous stream id found for station %s.%s' % (net, sta)) + continue + + setup = seiscomp.datamodel.findSetup(cfg, self.name(), True) + if not setup: + seiscomp.logging.warning( + 'could not find station setup for %s.%s' % (net, sta)) + continue + + params = seiscomp.datamodel.ParameterSet.Find(setup.parameterSetID()) + if not params: + seiscomp.logging.warning( + 'could not find station parameters for %s.%s' % (net, sta)) + continue + + detecLocid = '' + detecStream = None + + for j in range(params.parameterCount()): + param = params.parameter(j) + if param.name() == 'detecStream': + detecStream = param.value() + elif param.name() == 'detecLocid': + detecLocid = param.value() + + if detecStream is None: + seiscomp.logging.warning( + 'could not find detecStream for %s.%s' % (net, sta)) + continue + + loc = inv.getSensorLocation(net, sta, detecLocid, now) + if loc is None: + seiscomp.logging.warning( + 'could not find preferred location for %s.%s' % (net, sta)) + continue + + components = {} + tc = seiscomp.datamodel.ThreeComponents() + seiscomp.datamodel.getThreeComponents(tc, loc, detecStream[:2], now) + if tc.vertical(): + cha = tc.vertical() + wfsID = seiscomp.datamodel.WaveformStreamID(net, sta, loc.code(), + cha.code(), '') + components[cha.code()[-1]] = wfsID + seiscomp.logging.debug('add stream %s (vertical)' % wfs2Str(wfsID)) + if tc.firstHorizontal(): + cha = tc.firstHorizontal() + wfsID = seiscomp.datamodel.WaveformStreamID(net, sta, loc.code(), + cha.code(), '') + components[cha.code()[-1]] = wfsID + seiscomp.logging.debug('add stream %s (first horizontal)' % wfs2Str(wfsID)) + if tc.secondHorizontal(): + cha = tc.secondHorizontal() + wfsID = seiscomp.datamodel.WaveformStreamID(net, sta, loc.code(), + cha.code(), '') + components[cha.code()[-1]] = wfsID + seiscomp.logging.debug('add stream %s (second horizontal)' % wfs2Str(wfsID)) + if len(components) > 0: + self.streams[sta] = components + + return + + # fallback loading streams from inventory + seiscomp.logging.warning( + 'no configuration module available, loading streams ' + 'from inventory and selecting first available stream ' + 'matching epoch') + for iNet in range(inv.inventory().networkCount()): + net = inv.inventory().network(iNet) + seiscomp.logging.debug('network %s: loaded %i stations' % (net.code(), net.stationCount())) + for iSta in range(net.stationCount()): + sta = net.station(iSta) + try: + start = sta.start() + if not start <= now: + continue + except: + 
continue + + try: + end = sta.end() + if not now <= end: + continue + except: + pass + + for iLoc in range(sta.sensorLocationCount()): + loc = sta.sensorLocation(iLoc) + for iCha in range(loc.streamCount()): + cha = loc.stream(iCha) + + wfsID = seiscomp.datamodel.WaveformStreamID(net.code(), + sta.code(), loc.code(), cha.code(), '') + comp = cha.code()[2] + if sta.code() not in self.streams: + components = {} + components[comp] = wfsID + self.streams[sta.code()] = components + else: + # Seismic Handler does not support network, + # location and channel code: make sure network and + # location codes match first item in station + # specific steam list + oldWfsID = list(self.streams[sta.code()].values())[0] + if net.code() != oldWfsID.networkCode() or \ + loc.code() != oldWfsID.locationCode() or \ + cha.code()[:2] != oldWfsID.channelCode()[:2]: + seiscomp.logging.warning( + 'ambiguous stream id found for station %s, ignoring %s' + % (sta.code(), wfs2Str(wfsID))) + continue + + self.streams[sta.code()][comp] = wfsID + + seiscomp.logging.debug('add stream %s' % wfs2Str(wfsID)) + + ########################################################################### + def parseTime(self, timeStr): + time = seiscomp.core.Time() + for fmt in TimeFormats: + if time.fromString(timeStr, fmt): + break + return time + + ########################################################################### + def parseMagType(self, value): + if value == 'm': + return 'M' + elif value == 'ml': + return 'ML' + elif value == 'mb': + return 'mb' + elif value == 'ms': + return 'Ms(BB)' + elif value == 'mw': + return 'Mw' + elif value == 'bb': + return 'mB' + + return '' + + ########################################################################### + def sh2proc(self, file): + ep = seiscomp.datamodel.EventParameters() + origin = seiscomp.datamodel.Origin.Create() + event = seiscomp.datamodel.Event.Create() + + origin.setCreationInfo(seiscomp.datamodel.CreationInfo()) + origin.creationInfo().setCreationTime(seiscomp.core.Time.GMT()) + + originQuality = None + originCE = None + latFound = False + lonFound = False + depthError = None + originComments = {} + + # variables, reset after 'end of phase' + pick = None + stationMag = None + staCode = None + compCode = None + stationMagBB = None + + amplitudeDisp = None + amplitudeVel = None + amplitudeSNR = None + amplitudeBB = None + + magnitudeMB = None + magnitudeML = None + magnitudeMS = None + magnitudeBB = None + + km2degFac = 1.0 / seiscomp.math.deg2km(1.0) + + # read file line by line, split key and value at colon + iLine = 0 + for line in file: + iLine += 1 + a = line.split(':', 1) + key = a[0].strip() + keyLower = key.lower() + value = None + + # empty line + if len(keyLower) == 0: + continue + + # end of phase + elif keyLower == '--- end of phase ---': + if pick is None: + seiscomp.logging.warning( + 'Line %i: found empty phase block' % iLine) + continue + + if staCode is None or compCode is None: + seiscomp.logging.warning( + 'Line %i: end of phase, stream code incomplete' % iLine) + continue + + if not staCode in self.streams: + seiscomp.logging.warning( + 'Line %i: end of phase, station code %s not found in inventory' % (iLine, staCode)) + continue + + if not compCode in self.streams[staCode]: + seiscomp.logging.warning( + 'Line %i: end of phase, component %s of station %s not found in inventory' % (iLine, compCode, staCode)) + continue + + streamID = self.streams[staCode][compCode] + + pick.setWaveformID(streamID) + ep.add(pick) + + arrival.setPickID(pick.publicID()) + 
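The '--- end of phase ---' branch being handled here closes one block of the Seismic Handler .evt input: the file is a flat sequence of 'key: value' lines, and each phase block is terminated by that marker (matched case-insensitively via keyLower). A toy parser for just the block structure, with made-up sample values; the real mapping of these keys onto Pick, Arrival, Amplitude and StationMagnitude objects is what the surrounding code does:

# Toy illustration of the .evt block structure parsed by sh2proc; the sample
# keys follow the ones handled in this file, the values are made up.
def split_phase_blocks(lines):
    blocks, current = [], {}
    for line in lines:
        key, _, value = line.partition(':')
        key = key.strip()
        if not key:
            continue
        if key.lower() == '--- end of phase ---':
            if current:
                blocks.append(current)
            current = {}
        else:
            current[key.lower()] = value.strip()
    return blocks

sample = [
    "Station code        : MALT",
    "Phase name          : P",
    "Onset time          : 12-Jan-2023_10:23:45.123",
    "--- End of Phase ---",
]
print(split_phase_blocks(sample))
# [{'station code': 'MALT', 'phase name': 'P',
#   'onset time': '12-Jan-2023_10:23:45.123'}]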
arrival.setPhase(phase) + origin.add(arrival) + + if amplitudeSNR is not None: + amplitudeSNR.setPickID(pick.publicID()) + amplitudeSNR.setWaveformID(streamID) + ep.add(amplitudeSNR) + + if amplitudeBB is not None: + amplitudeBB.setPickID(pick.publicID()) + amplitudeBB.setWaveformID(streamID) + ep.add(amplitudeBB) + + if stationMagBB is not None: + stationMagBB.setWaveformID(streamID) + origin.add(stationMagBB) + stationMagContrib = seiscomp.datamodel.StationMagnitudeContribution() + stationMagContrib.setStationMagnitudeID( + stationMagBB.publicID()) + if magnitudeBB is None: + magnitudeBB = seiscomp.datamodel.Magnitude.Create() + magnitudeBB.add(stationMagContrib) + + if stationMag is not None: + if stationMag.type() in ['mb', 'ML'] and amplitudeDisp is not None: + amplitudeDisp.setPickID(pick.publicID()) + amplitudeDisp.setWaveformID(streamID) + amplitudeDisp.setPeriod( + seiscomp.datamodel.RealQuantity(ampPeriod)) + amplitudeDisp.setType(stationMag.type()) + ep.add(amplitudeDisp) + + if stationMag.type() in ['Ms(BB)'] and amplitudeVel is not None: + amplitudeVel.setPickID(pick.publicID()) + amplitudeVel.setWaveformID(streamID) + amplitudeVel.setPeriod( + seiscomp.datamodel.RealQuantity(ampPeriod)) + amplitudeVel.setType(stationMag.type()) + ep.add(amplitudeVel) + + stationMag.setWaveformID(streamID) + origin.add(stationMag) + + stationMagContrib = seiscomp.datamodel.StationMagnitudeContribution() + stationMagContrib.setStationMagnitudeID( + stationMag.publicID()) + + magType = stationMag.type() + if magType == 'ML': + if magnitudeML is None: + magnitudeML = seiscomp.datamodel.Magnitude.Create() + magnitudeML.add(stationMagContrib) + + elif magType == 'Ms(BB)': + if magnitudeMS is None: + magnitudeMS = seiscomp.datamodel.Magnitude.Create() + magnitudeMS.add(stationMagContrib) + + elif magType == 'mb': + if magnitudeMB is None: + magnitudeMB = seiscomp.datamodel.Magnitude.Create() + magnitudeMB.add(stationMagContrib) + + pick = None + staCode = None + compCode = None + stationMag = None + stationMagBB = None + amplitudeDisp = None + amplitudeVel = None + amplitudeSNR = None + amplitudeBB = None + continue + + # empty key + elif len(a) == 1: + seiscomp.logging.warning('Line %i: key without value' % iLine) + continue + + value = a[1].strip() + if pick is None: + pick = seiscomp.datamodel.Pick.Create() + arrival = seiscomp.datamodel.Arrival() + + try: + ############################################################## + # station parameters + + # station code + if keyLower == 'station code': + staCode = value + + # pick time + elif keyLower == 'onset time': + pick.setTime(seiscomp.datamodel.TimeQuantity(self.parseTime(value))) + + # pick onset type + elif keyLower == 'onset type': + found = False + for onset in [seiscomp.datamodel.EMERGENT, seiscomp.datamodel.IMPULSIVE, + seiscomp.datamodel.QUESTIONABLE]: + if value == seiscomp.datamodel.EPickOnsetNames_name(onset): + pick.setOnset(onset) + found = True + break + if not found: + raise Exception('Unsupported onset value') + + # phase code + elif keyLower == 'phase name': + phase = seiscomp.datamodel.Phase() + phase.setCode(value) + pick.setPhaseHint(phase) + + # event type + elif keyLower == 'event type': + evttype = EventTypes[value] + event.setType(evttype) + originComments[key] = value + + # filter ID + elif keyLower == 'applied filter': + pick.setFilterID(value) + + # channel code, prepended by configured Channel prefix if only + # one character is found + elif keyLower == 'component': + compCode = value + + # pick evaluation mode + elif 
keyLower == 'pick type': + found = False + for mode in [seiscomp.datamodel.AUTOMATIC, seiscomp.datamodel.MANUAL]: + if value == seiscomp.datamodel.EEvaluationModeNames_name(mode): + pick.setEvaluationMode(mode) + found = True + break + if not found: + raise Exception('Unsupported evaluation mode value') + + # pick author + elif keyLower == 'analyst': + creationInfo = seiscomp.datamodel.CreationInfo() + creationInfo.setAuthor(value) + pick.setCreationInfo(creationInfo) + + # pick polarity + # isn't tested + elif keyLower == 'sign': + if value == 'positive': + sign = '0' # positive + elif value == 'negative': + sign = '1' # negative + else: + sign = '2' # unknown + pick.setPolarity(float(sign)) + + # arrival weight + elif keyLower == 'weight': + arrival.setWeight(float(value)) + + # arrival azimuth + elif keyLower == 'theo. azimuth (deg)': + arrival.setAzimuth(float(value)) + + # pick theo backazimuth + elif keyLower == 'theo. backazimuth (deg)': + if pick.slownessMethodID() == 'corrected': + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + else: + pick.setBackazimuth( + seiscomp.datamodel.RealQuantity(float(value))) + pick.setSlownessMethodID('theoretical') + + # pick beam slowness + elif keyLower == 'beam-slowness (sec/deg)': + if pick.slownessMethodID() == 'corrected': + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + else: + pick.setHorizontalSlowness( + seiscomp.datamodel.RealQuantity(float(value))) + pick.setSlownessMethodID('Array Beam') + + # pick beam backazimuth + elif keyLower == 'beam-azimuth (deg)': + if pick.slownessMethodID() == 'corrected': + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + else: + pick.setBackazimuth( + seiscomp.datamodel.RealQuantity(float(value))) + + # pick epi slowness + elif keyLower == 'epi-slowness (sec/deg)': + pick.setHorizontalSlowness( + seiscomp.datamodel.RealQuantity(float(value))) + pick.setSlownessMethodID('corrected') + + # pick epi backazimuth + elif keyLower == 'epi-azimuth (deg)': + pick.setBackazimuth(seiscomp.datamodel.RealQuantity(float(value))) + + # arrival distance degree + elif keyLower == 'distance (deg)': + arrival.setDistance(float(value)) + + # arrival distance km, recalculates for degree + elif keyLower == 'distance (km)': + if isinstance(arrival.distance(), float): + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine-1, 'distance (deg)')) + arrival.setDistance(float(value) * km2degFac) + + # arrival time residual + elif keyLower == 'residual time': + arrival.setTimeResidual(float(value)) + + # amplitude snr + elif keyLower == 'signal/noise': + amplitudeSNR = seiscomp.datamodel.Amplitude.Create() + amplitudeSNR.setType('SNR') + amplitudeSNR.setAmplitude( + seiscomp.datamodel.RealQuantity(float(value))) + + # amplitude period + elif keyLower.startswith('period'): + ampPeriod = float(value) + + # amplitude value for displacement + elif keyLower == 'amplitude (nm)': + amplitudeDisp = seiscomp.datamodel.Amplitude.Create() + amplitudeDisp.setAmplitude( + seiscomp.datamodel.RealQuantity(float(value))) + amplitudeDisp.setUnit('nm') + + # amplitude value for velocity + elif keyLower.startswith('vel. 
amplitude'): + amplitudeVel = seiscomp.datamodel.Amplitude.Create() + amplitudeVel.setAmplitude( + seiscomp.datamodel.RealQuantity(float(value))) + amplitudeVel.setUnit('nm/s') + + elif keyLower == 'bb amplitude (nm/sec)': + amplitudeBB = seiscomp.datamodel.Amplitude.Create() + amplitudeBB.setAmplitude( + seiscomp.datamodel.RealQuantity(float(value))) + amplitudeBB.setType('mB') + amplitudeBB.setUnit('nm/s') + amplitudeBB.setPeriod(seiscomp.datamodel.RealQuantity(ampBBPeriod)) + + elif keyLower == 'bb period (sec)': + ampBBPeriod = float(value) + + elif keyLower == 'broadband magnitude': + magType = self.parseMagType('bb') + stationMagBB = seiscomp.datamodel.StationMagnitude.Create() + stationMagBB.setMagnitude( + seiscomp.datamodel.RealQuantity(float(value))) + stationMagBB.setType(magType) + stationMagBB.setAmplitudeID(amplitudeBB.publicID()) + + # ignored + elif keyLower == 'quality number': + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + + # station magnitude value and type + elif keyLower.startswith('magnitude '): + magType = self.parseMagType(key[10:]) + stationMag = seiscomp.datamodel.StationMagnitude.Create() + stationMag.setMagnitude( + seiscomp.datamodel.RealQuantity(float(value))) + + if len(magType) > 0: + stationMag.setType(magType) + if magType == 'mb': + stationMag.setAmplitudeID(amplitudeDisp.publicID()) + + elif magType == 'MS(BB)': + stationMag.setAmplitudeID(amplitudeVel.publicID()) + else: + seiscomp.logging.debug('Line %i: Magnitude Type not known %s.' % (iLine, magType)) + + ############################################################### + # origin parameters + + # event ID, added as origin comment later on + elif keyLower == 'event id': + originComments[key] = value + + # magnitude value and type + elif keyLower == 'mean bb magnitude': + magType = self.parseMagType('bb') + if magnitudeBB is None: + magnitudeBB = seiscomp.datamodel.Magnitude.Create() + magnitudeBB.setMagnitude( + seiscomp.datamodel.RealQuantity(float(value))) + magnitudeBB.setType(magType) + + elif keyLower.startswith('mean magnitude '): + magType = self.parseMagType(key[15:]) + + if magType == 'ML': + if magnitudeML is None: + magnitudeML = seiscomp.datamodel.Magnitude.Create() + magnitudeML.setMagnitude( + seiscomp.datamodel.RealQuantity(float(value))) + magnitudeML.setType(magType) + + elif magType == 'Ms(BB)': + if magnitudeMS is None: + magnitudeMS = seiscomp.datamodel.Magnitude.Create() + magnitudeMS.setMagnitude( + seiscomp.datamodel.RealQuantity(float(value))) + magnitudeMS.setType(magType) + + elif magType == 'mb': + if magnitudeMB is None: + magnitudeMB = seiscomp.datamodel.Magnitude.Create() + magnitudeMB.setMagnitude( + seiscomp.datamodel.RealQuantity(float(value))) + magnitudeMB.setType(magType) + + else: + seiscomp.logging.warning('Line %i: Magnitude type %s not defined yet.' 
% (iLine, magType)) + + # latitude + elif keyLower == 'latitude': + origin.latitude().setValue(float(value)) + latFound = True + elif keyLower == 'error in latitude (km)': + origin.latitude().setUncertainty(float(value)) + + # longitude + elif keyLower == 'longitude': + origin.longitude().setValue(float(value)) + lonFound = True + elif keyLower == 'error in longitude (km)': + origin.longitude().setUncertainty(float(value)) + + # depth + elif keyLower == 'depth (km)': + origin.setDepth(seiscomp.datamodel.RealQuantity(float(value))) + if depthError is not None: + origin.depth().setUncertainty(depthError) + elif keyLower == 'depth type': + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + elif keyLower == 'error in depth (km)': + depthError = float(value) + try: + origin.depth().setUncertainty(depthError) + except seiscomp.core.ValueException: + pass + + # time + elif keyLower == 'origin time': + origin.time().setValue(self.parseTime(value)) + elif keyLower == 'error in origin time': + origin.time().setUncertainty(float(value)) + + # location method + elif keyLower == 'location method': + origin.setMethodID(str(value)) + + # region table, added as origin comment later on + elif keyLower == 'region table': + originComments[key] = value + + # region table, added as origin comment later on + elif keyLower == 'region id': + originComments[key] = value + + # source region, added as origin comment later on + elif keyLower == 'source region': + originComments[key] = value + + # used station count + elif keyLower == 'no. of stations used': + if originQuality is None: + originQuality = seiscomp.datamodel.OriginQuality() + originQuality.setUsedStationCount(int(value)) + + # ignored + elif keyLower == 'reference location name': + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + + # confidence ellipsoid major axis + elif keyLower == 'error ellipse major': + if originCE is None: + originCE = seiscomp.datamodel.ConfidenceEllipsoid() + originCE.setSemiMajorAxisLength(float(value)) + + # confidence ellipsoid minor axis + elif keyLower == 'error ellipse minor': + if originCE is None: + originCE = seiscomp.datamodel.ConfidenceEllipsoid() + originCE.setSemiMinorAxisLength(float(value)) + + # confidence ellipsoid rotation + elif keyLower == 'error ellipse strike': + if originCE is None: + originCE = seiscomp.datamodel.ConfidenceEllipsoid() + originCE.setMajorAxisRotation(float(value)) + + # azimuthal gap + elif keyLower == 'max azimuthal gap (deg)': + if originQuality is None: + originQuality = seiscomp.datamodel.OriginQuality() + originQuality.setAzimuthalGap(float(value)) + + # creation info author + elif keyLower == 'author': + origin.creationInfo().setAuthor(value) + + # creation info agency + elif keyLower == 'source of information': + origin.creationInfo().setAgencyID(value) + + # earth model id + elif keyLower == 'velocity model': + origin.setEarthModelID(value) + + # standard error + elif keyLower == 'rms of residuals (sec)': + if originQuality is None: + originQuality = seiscomp.datamodel.OriginQuality() + originQuality.setStandardError(float(value)) + + # ignored + elif keyLower == 'phase flags': + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + + # ignored + elif keyLower == 'location input params': + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + + # missing keys + elif keyLower == 'ampl&period source': + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + + elif keyLower 
== 'location quality': + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + + elif keyLower == 'reference latitude': + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + + elif keyLower == 'reference longitude': + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + + elif keyLower.startswith('amplitude time'): + seiscomp.logging.debug('Line %i: ignoring parameter: %s' % (iLine, key)) + + # unknown key + else: + seiscomp.logging.warning('Line %i: ignoring unknown parameter: %s' % (iLine, key)) + + except ValueError as ve: + seiscomp.logging.warning('Line %i: can not parse %s value' % (iLine, key)) + except Exception: + seiscomp.logging.error('Line %i: %s' % (iLine, str(traceback.format_exc()))) + return None + + # check + if not latFound: + seiscomp.logging.warning('could not add origin, missing latitude parameter') + elif not lonFound: + seiscomp.logging.warning('could not add origin, missing longitude parameter') + elif not origin.time().value().valid(): + seiscomp.logging.warning('could not add origin, missing origin time parameter') + else: + if magnitudeMB is not None: + origin.add(magnitudeMB) + if magnitudeML is not None: + origin.add(magnitudeML) + if magnitudeMS is not None: + origin.add(magnitudeMS) + if magnitudeBB is not None: + origin.add(magnitudeBB) + + ep.add(event) + ep.add(origin) + + if originQuality is not None: + origin.setQuality(originQuality) + + if originCE is not None: + uncertainty = seiscomp.datamodel.OriginUncertainty() + uncertainty.setConfidenceEllipsoid(originCE) + origin.setUncertainty(uncertainty) + + for k, v in originComments.items(): + comment = seiscomp.datamodel.Comment() + comment.setId(k) + comment.setText(v) + origin.add(comment) + + return ep + + ########################################################################### + def run(self): + self.loadStreams() + + try: + if self.inputFile == '-': + f = sys.stdin + else: + f = open(self.inputFile) + except IOError as e: + seiscomp.logging.error(str(e)) + return False + + ep = self.sh2proc(f) + if ep is None: + return False + + ar = seiscomp.io.XMLArchive() + ar.create('-') + ar.setFormattedOutput(True) + ar.writeObject(ep) + ar.close() + + return True + + +############################################################################### +def main(): + try: + app = SH2Proc() + return app() + except: + sys.stderr.write(str(traceback.format_exc())) + + return 1 + + +if __name__ == '__main__': + sys.exit(main()) + + +# vim: ts=4 et diff --git a/bin/slarchive b/bin/slarchive new file mode 100755 index 0000000..f742847 Binary files /dev/null and b/bin/slarchive differ diff --git a/bin/slinktool b/bin/slinktool new file mode 100755 index 0000000..29fa619 Binary files /dev/null and b/bin/slinktool differ diff --git a/bin/slmon b/bin/slmon new file mode 100755 index 0000000..3ea75b2 --- /dev/null +++ b/bin/slmon @@ -0,0 +1,483 @@ +#!/usr/bin/env seiscomp-python + +from __future__ import print_function +from getopt import getopt, GetoptError +from time import time, gmtime +from datetime import datetime +import os, sys, signal, glob, re +from seiscomp.myconfig import MyConfig +import seiscomp.slclient +import seiscomp.kernel, seiscomp.config + +usage_info = """ +Usage: + slmon [options] + +SeedLink monitor creating static web pages + +Options: + -h, --help display this help message + -c ini_setup = arg + -s ini_stations = arg + -t refresh = float(arg) # XXX not yet used + -v verbose = 1 + +Examples: +Start slmon from the command line + slmon -c 
$SEISCOMP_ROOT/var/lib/slmon/config.ini + +Restart slmon in order to update the web pages. Use crontab entries for +automatic restart, e.g.: + */3 * * * * /home/sysop/seiscomp/bin/seiscomp check slmon >/dev/null 2>&1 +""" + +def usage(exitcode=0): + sys.stderr.write(usage_info) + exit(exitcode) + +try: + seiscompRoot=os.environ["SEISCOMP_ROOT"] +except: + print("\nSEISCOMP_ROOT must be defined - EXIT\n", file=sys.stderr) + usage(exitcode=2) + +ini_stations = os.path.join(seiscompRoot,'var/lib/slmon/stations.ini') +ini_setup = os.path.join(seiscompRoot,'var/lib/slmon/config.ini') + +regexStreams = re.compile("[SLBVEH][HNLG][ZNE123]") + +verbose = 0 + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def printCrontab(self): + print("3 * * * * %s/bin/seiscomp check slmon >/dev/null 2>&1" % (self.env.SEISCOMP_ROOT)) + +class Status: + + def __repr__(self): + return "%2s %-5s %2s %3s %1s %s %s" % \ + (self.net, self.sta, self.loc, self.cha, self.typ, \ + str(self.last_data), str(self.last_feed)) +class StatusDict(dict): + + def __init__(self, source=None): + if source: + self.read(source) + + def fromSlinkTool(self,server="",stations=["GE_MALT","GE_MORC","GE_IBBN"]): + # later this shall use XML + cmd = "slinktool -nd 10 -nt 10 -Q %s" % server + print(cmd) + f = os.popen(cmd) + # regex = re.compile("[SLBVEH][HNLG][ZNE123]") + regex = regexStreams + for line in f: + net_sta = line[:2].strip() + "_" + line[3:8].strip() + if not net_sta in stations: + continue + typ = line[16] + if typ != "D": + continue + cha = line[12:15].strip() + if not regex.match(cha): + continue + + d = Status() + d.net = line[ 0: 2].strip() + d.sta = line[ 3: 8].strip() + d.loc = line[ 9:11].strip() + d.cha = line[12:15] + d.typ = line[16] + d.last_data = seiscomp.slclient.timeparse(line[47:70]) + d.last_feed = d.last_data + sec = "%s_%s" % (d.net, d.sta) + sec = "%s.%s.%s.%s.%c" % (d.net, d.sta, d.loc, d.cha, d.typ) + self[sec] = d + + def read(self, source): + if type(source) == str: + source = file(source) + if type(source) == file: + source = source.readlines() + if type(source) != list: + raise TypeError('cannot read from %s' % str(type(source))) + + for line in source: + d = Status() + d.net = line[ 0: 2] + d.sta = line[ 3: 8].strip() + d.loc = line[ 9:11].strip() + d.cha = line[12:15] + d.typ = line[16] + d.last_data = seiscomp.slclient.timeparse(line[18:41]) + d.last_feed = seiscomp.slclient.timeparse(line[42:65]) + if d.last_feed < d.last_data: + d.last_feed = d.last_data + sec = "%s_%s:%s.%s.%c" % (d.net, d.sta, d.loc, d.cha, d.typ) + self[sec] = d + + def write(self, f): + if type(f) is str: + f = file(f, "w") + lines = [] + for key in list(self.keys()): + lines.append(str(self[key])) + lines.sort() + f.write('\n'.join(lines)+'\n') + + +def colorLegend(htmlfile): + htmlfile.write("

Latencies:
\n" \ + "\n\n" \ + "\n" \ + "\n" \ + "\n" \ + "\n" \ + "\n" \ + "\n" \ + "\n" \ + "\n" \ + "\n" \ + "\n" \ + "\n
 <30 m  < 1 h  < 2 h  < 6 h  < 1 d  < 2 d  < 3 d  < 4 d  < 5 d  > 5 d 
\n

\n") + +# encodes an email address so that it cannot (easily) be extracted +# from the web page. This is meant to be a spam protection. +def encode(txt): return ''.join(["&#%d;" % ord(c) for c in txt]) + +def total_seconds(td): return td.seconds + (td.days*86400) + +def pageTrailer(htmlfile, config): + + htmlfile.write("
\n" \ + "\n" \ + "\n\n" \ + " \n\n" \ + "
Last updated %04d/%02d/%02d %02d:%02d:%02d UTC%s
\n\n" % (gmtime()[:6] + (config['setup']['linkurl'],) + (config['setup']['linkname'],)) ) + +def getColor(delta): + delay = total_seconds(delta) + if delay >432000: return '#666666' + if delay >345600: return '#999999' + if delay >259200: return '#cccccc' + if delay >172800: return '#ffcccc' + if delay > 86400: return '#ff3333' + elif delay > 21600: return '#ff9966' + elif delay > 7200: return '#ffff00' + elif delay > 3600: return '#00ff00' + elif delay > 1800: return '#3399ff' + else: return '#cc99ff' + +TDdummy = "n/a" + +def TDf(delta, col="#ffffff"): + if delta is None: return TDdummy % col + + t = total_seconds(delta) + + if t > 86400: x = "%.1f d" % (t/86400.) + elif t > 7200: x = "%.1f h" % (t/3600.) + elif t > 120: x = "%.1f m" % (t/60.) + else: x = "%.1f s" % (t) + return "  %s " % \ + (col,x) + +def TDt(t, col="#ffffff"): + if t is None: return TDdummy % col + + x = t.strftime("%Y/%m/%d %H:%M:%S") + return " %s " % \ + (col,x) + +def myrename(name1, name2): + + # fault-tolerant rename that doesn't cause an exception if it fails, which + # may happen e.g. if the target is on a non-reachable NFS directory + try: + os.rename(name1, name2) + except OSError: + print("failed to rename(%s,%s)" % (name1, name2), file=sys.stderr) + + +def makeMainHTML(config): + + global status + + now = datetime.utcnow() + + stations = [] + + streams = [ x for x in list(status.keys()) if regexStreams.search(x) ] + + streams.sort() + + tmp_rt = [] + tmp_du = [] + + for label in streams: + lat1 = now - status[label].last_data # XXX + lat2 = now - status[label].last_feed # XXX + lat3 = lat1-lat2 # XXX + if lat3 == 0.: lat3 = lat2 = None + + if label[-2]=='.' and label[-1] in "DE": + label = label[:-2] + n,s,x,x = label.split(".") + if s in stations: continue # avoid duplicates for different locations + stations.append(s) + + net_sta = "%s_%s" % (n,s) + line = " %s %s %s%s%s" \ + % (n, net_sta, s, TDf(lat1, getColor(lat1)), + TDf(lat2, getColor(lat2)), + TDf(lat3, getColor(lat3))) + if config.station[net_sta]['type'][:4] == 'real': + tmp_rt.append(line) + else: tmp_du.append(line) + makeStatHTML(net_sta, config) + + try: os.makedirs(config['setup']['wwwdir']) + except: pass + + temp = "%s/tmp.html" % config['setup']['wwwdir'] + dest = "%s/index.html" % config['setup']['wwwdir'] + + table_begin = """ + + + + + + + + + + + """ + table_end = """ +
StationLatencies
DataFeedDiff.
+ """ + + htmlfile = open(temp, "w") + htmlfile.write(""" + + %s + + + + +
%s
\n""" % \ + ( config['setup']['title'], int(config['setup']['refresh']), + config['setup']['icon'], config['setup']['title'])) + + + htmlfile.write("
\n") + if len(tmp_rt): + htmlfile.write("\n") + if len(tmp_du): + htmlfile.write("\n") + htmlfile.write("") + if len(tmp_rt): + htmlfile.write("\n") + if len(tmp_du): + htmlfile.write("\n") + htmlfile.write("
\n" \ + "Real-time stations\n\n" \ + "Dial-up stations\n
\n") + htmlfile.write(table_begin) + htmlfile.write("\n".join(tmp_rt)) + htmlfile.write(table_end) + htmlfile.write("\n") + htmlfile.write(table_begin) + htmlfile.write("\n".join(tmp_du)) + htmlfile.write(table_end) + htmlfile.write("
\n") + + colorLegend(htmlfile) + pageTrailer(htmlfile, config) + htmlfile.close() + myrename(temp, dest) + + +def makeStatHTML(net_sta, config): + global status + + try: os.makedirs(config['setup']['wwwdir']) + except: pass + + temp = "%s/tmp2.html" % config['setup']['wwwdir'] + dest = "%s/%s.html" % ( config['setup']['wwwdir'], net_sta) + + htmlfile = open(temp, "w") + htmlfile.write(""" + + %s - Station %s + + + + +
%s - Station %s\n""" % \ + ( config['setup']['title'], net_sta, int(config['setup']['refresh']), + config['setup']['icon'], + config['setup']['title'], net_sta.split("_")[-1])) + + try: + name = config.station[net_sta]['info'] + htmlfile.write("
%s" % name) + except: pass + htmlfile.write("
\n") + + if 'text' in config.station[net_sta]: + htmlfile.write("

%s

\n" % config.station[net_sta]['text']) + + htmlfile.write("""

+ + + + + + + + + + + + + """) + + now = datetime.utcnow() + + netsta2=net_sta.replace("_",".") + streams = [ x for x in list(status.keys()) if x.find(netsta2)==0 ] + streams.sort() + for label in streams: + tim1 = status[label].last_data + tim2 = status[label].last_feed + + lat1, lat2, lat3 = now-tim1, now-tim2, tim2-tim1 + col1, col2, col3 = getColor(lat1), getColor(lat2), getColor(lat3) + if lat1==lat2: lat2 = lat3 = None + if label[-2]=='.' and label[-1] in "DE": + label = label[:-2] + n,s,loc,c = label.split(".") + c = ("%s.%s" % (loc,c)).strip(".") + htmlfile.write("%s%s%s%s%s\n" \ + % (s, c, TDt(tim1, col1), TDf(lat1, col1), + TDt(tim2, col2), TDf(lat2, col2), + TDf(lat3, col3))) + + htmlfile.write("
Station/
Channel
DataFeedDiff.
Last SampleLatencyLast ReceivedLatency
" \ + " %s %s 

\n") + colorLegend(htmlfile) + + htmlfile.write("

\nHow to interpret " \ + "these numbers?
\n") + if 'liveurl' in config['setup']: + # substitute '%s' in live_url by station name + url = config['setup']['liveurl'] % s + htmlfile.write("View a live seismogram of " + "station %s

\n" % (url, s)) + htmlfile.write("

\n") + pageTrailer(htmlfile, config) + htmlfile.close() + myrename(temp, dest) + +def read_ini(): + global config, ini_setup, ini_stations + print("\nreading setup config from '%s'" % ini_setup) + if not os.path.isfile(ini_setup): + print("[error] setup config '%s' does not exist" % ini_setup, file=sys.stderr) + usage(exitcode=2) + + config = MyConfig(ini_setup) + print("reading station config from '%s'" % ini_stations) + if not os.path.isfile(ini_stations): + print("[error] station config '%s' does not exist" % ini_stations, file=sys.stderr) + usage(exitcode=2) + config.station = MyConfig(ini_stations) + +def SIGINT_handler(signum, frame): + global status + print("received signal #%d => will write status file and exit" % signum) +# status.write("status.tab") + sys.exit(0) + +try: + opts, args = getopt(sys.argv[1:], "c:s:t:hv") +except GetoptError: + print("\nUnknown option in "+str(sys.argv[1:])+" - EXIT.", file=sys.stderr) + usage(exitcode=2) + +for flag, arg in opts: + if flag == "-c": ini_setup = arg + if flag == "-s": ini_stations = arg + if flag == "-t": refresh = float(arg) # XXX not yet used + if flag == "-h": usage(exitcode=0) + if flag == "-v": verbose = 1 + + +signal.signal(signal.SIGHUP, SIGINT_handler) +signal.signal(signal.SIGINT, SIGINT_handler) +signal.signal(signal.SIGQUIT, SIGINT_handler) +signal.signal(signal.SIGTERM, SIGINT_handler) + +read_ini() + +cha = "???" +loc = "" + +s = config.station +net_sta = ["%s_%s" % (s[k]['net'],s[k]['sta']) for k in s] +s_arg = ','.join(net_sta) +streams = [ (s[k]['net'],s[k]['sta'],loc,cha) for k in s ] + + +if 'server' in config['setup']: + server = config['setup']['server'] +else: server = "localhost" + +#def read_initial(config): +# +# for s in config.station: +# print s,glob.glob("/home/dcop/seedlink/%s/segments/*" % s) +# for f in glob.glob("/home/dcop/seedlink/%s/segments/*" % s): +# print f +# +#read_initial(config) + + +#print "reading initial time windows from file 'status.tab'" +#status = StatusDict("status.tab") +status = StatusDict() +#if verbose: status.write(sys.stderr) + + +print("generating output to '%s'" % config['setup']['wwwdir']) + +print("getting initial time windows from SeedLink server '%s'" % server) +status.fromSlinkTool(server, stations=net_sta) +if verbose: status.write(sys.stderr) + +nextTimeGenerateHTML = time() + +print("setting up connection to SeedLink server '%s'" % server) + +input = seiscomp.slclient.Input(server, streams) +for rec in input: + id = '.'.join([rec.net, rec.sta, rec.loc, rec.cha, rec.rectype]) +# if not id in status: continue # XXX XXX XXX + try: + status[id].last_data = rec.end_time + status[id].last_feed = datetime.utcnow() + except: + continue + + if time() > nextTimeGenerateHTML: + makeMainHTML(config) + nextTimeGenerateHTML = time() + int(config['setup']['refresh']) diff --git a/bin/tab2inv b/bin/tab2inv new file mode 100755 index 0000000..3b6603f --- /dev/null +++ b/bin/tab2inv @@ -0,0 +1,88 @@ +#!/usr/bin/env seiscomp-python + +from __future__ import print_function +import sys +from optparse import OptionParser +from nettab.tab import Tab +import seiscomp.io + +def main(): + # Creating the parser + parser = OptionParser(usage="Tab to Inventory (sc3) converter", version="1.0", add_help_option=True) + + parser.add_option("-i", "--ip", type="string", + help="Prefix to be added to each instrument generated.", dest="instrumentPrefix", default=None) + + parser.add_option("-f", "--filterf", type="string", + help="Indicates a folder containing the filters coefficients files", 
dest="ffolder", default=None) + + parser.add_option("-x", "--xmlf", type="string", + help="Indicates a folder containing the XML inventory files (needed for station group support)", dest="xfolder", default=None) + + parser.add_option("-D", "--database", type="string", + help="Database URL for inventory (needed for station group support)", dest="database", default=None) + + parser.add_option("", "--force", action="store_true", + help="Don't stop on error of individual files", dest="force", default=False) + + parser.add_option("-g", "--generate", action="store_true", + help="Generate XML file at the end", dest="generate", default=False) + + parser.add_option("-c", "--check", action="store_true", + help="Check the loaded files", dest="check", default=False) + + parser.add_option("-d", "--default", type="string", + help="Indicates the default file", dest="defaultFile", default=None) + + parser.add_option("-o", "--output", type="string", + help="Indicates the output file", dest="outFile", default="-") + + # Parsing & Error check + (options, args) = parser.parse_args() + error = False + + if len(args) < 1: + print("No input file(s) to digest", file=sys.stderr) + error = True + + if error: + print("Use -h for help on usage", file=sys.stderr) + return 1 + + # Execution + try: + inv = None + t=Tab(options.instrumentPrefix, options.defaultFile, options.ffolder, options.xfolder, options.database) + for f in args: + try: + t.digest(f) + except Exception as e: + print("Error digesting %s:\n %s" % (f, e), file=sys.stderr) + if not options.force: + raise e + + if options.check: + t.check() + return + + if options.generate: + inv = t.sc3Obj() + if inv: + ar = seiscomp.io.XMLArchive() + print("Generating file: %s" % options.outFile, file=sys.stderr) + ar.create(options.outFile) + ar.setFormattedOutput(True) + ar.setCompression(False) + ar.writeObject(inv) + ar.close() + except Exception as e: + print("Error: " + str(e), file=sys.stderr) + return 1 + finally: + print("Ending.", file=sys.stderr) + + return 0 + +if __name__ == "__main__": + ret = main() + sys.exit(ret) diff --git a/bin/tab2tab b/bin/tab2tab new file mode 100755 index 0000000..01c5cd4 --- /dev/null +++ b/bin/tab2tab @@ -0,0 +1,526 @@ +#!/usr/bin/env seiscomp-python + +from __future__ import print_function +import os +import sys +from datetime import datetime +from nettab.convertUtils import StationAttributes, NetworkAttributes, StationMappings, parseDate, formatDate, quote, hummanStr +from nettab.tab import Tab +from optparse import OptionParser +from nettab.nodesi import Instruments + +class TabConverter: + def __init__(self, networkCode): + self.__fmt__ = None + self.takeSugestions = None + + self.filename = None + + self.networkCode = networkCode + self.stationList = None + + self.nat = None + self.sat = None + self.sma = None + self.inst = None + self.defaultEpoch = parseDate("1980/001") + + self.start=0 + self.code=0 + self.description=0 + self.datalogger=0 + self.sensor=0 + self.channel=0 + self.gaind = 0 + self.longitude=0 + self.latitude=0 + self.elevation=0 + self.end=0 + self.depth=0 + self.orientation=0 + + ## default dates + self.startDate = parseDate("1980/001") + self.endDate = parseDate(None) + + def loadStationMapping(self, filename): + if self.networkCode is None: raise Exception("Cannot load Station mapping without network code") + if self.stationList is None: raise Exception("Cannot load Station mapping without station list") + + try: + sm = StationMappings(self.networkCode, self.stationList, filename) + self.sma = sm + 
except Exception as e: + raise e + + def loadStationAttribute(self, filename): + if self.networkCode is None: raise Exception("Cannot load Station att without network code") + if self.stationList is None: raise Exception("Cannot load Station att without station list") + + try: + sa = StationAttributes(self.networkCode, self.stationList, filename) + self.sat = sa + except Exception as e: + raise e + + def loadNetworkAttribute(self, filename): + if self.networkCode is None: raise Exception("Cannot load Network att without network code") + if self.stationList is None: raise Exception("Cannot load Network att without station list") + try: + na = NetworkAttributes(self.networkCode, filename) + self.nat = na + except Exception as e: + raise e + + def loadInstrumentsFile(self, filename, filterFolder): + tab = Tab(filterFolder=filterFolder) + tab.digest(filename) + if tab.i: + self.inst = tab.i + + def __fmtline__(self): + if not self.__fmt__: + fmt = "Sl: " + fmt += "%%-%ds" % self.code + fmt += " %%-%ds" % self.description + fmt += " %%-%ds" % self.datalogger + fmt += " %%-%ds" % self.sensor + fmt += " %%-%ds" % self.channel + fmt += " %%-%ds" % self.orientation + fmt += " %%-%ds" % self.latitude + fmt += " %%-%ds" % self.longitude + fmt += " %%-%ds" % self.elevation + fmt += " %%-%ds" % self.depth + fmt += " %%-%ds" % self.start + fmt += " %%-%ds" % self.end + self.__fmt__ = fmt + + return self.__fmt__ + + def __analyseLine__(self, items): + inputLine = " ".join(items) + if len(items) < 4: + raise Exception("Invalid items count on line %s" % inputLine) + + if len(items) <= 5: + netCode = items[2] + if netCode != self.networkCode: + raise Exception("Tab file (%s) doesn't match class (%s) -- %s" % (netCode,self.networkCode,inputLine)) + return [None, None, None] + else: + if len(items) < 6: + raise Exception("Invalid Station line %s" % inputLine) + + stationCode = items.pop(0) + code = len(stationCode) + self.code=max(self.code,code) + + description = len(quote(hummanStr(items.pop(0)))) + self.description=max(self.description, description) + + datalogger = len(items.pop(0)) + self.datalogger=max(self.datalogger, datalogger) + + sensor = len(items.pop(0)) + self.sensor=max(self.sensor, sensor) + + # Gain + gaind = items.pop(0) + if float(gaind) != 1.0: + self.datalogger = max (self.datalogger, datalogger + len(gaind)) + + channel = len(items.pop(0)) + self.channel=max(self.channel, channel) + + latitude = len(items.pop(0)) + self.latitude=max(self.latitude, latitude) + + longitude = len(items.pop(0)) + self.longitude=max(self.longitude, longitude) + + elevation = len(items.pop(0)) + self.elevation=max(self.elevation, elevation) + + #Orientation + depth = items.pop(0) + try: + float(depth) + orientation="ZNE" + except: + orientation = "Z" + (depth,a1,a2) = depth.split("/") + + a1n = float(a1) + a2n = float(a2) + + orientation+="1" + if a1n != 0.0: orientation += "(0.0,%s)"%a1 + + orientation+="2" + if a2n != 90.0: orientation+="(0.0,%s)"%a1 + + orientation = len(orientation) + self.orientation=max(self.orientation, orientation) + + depth = len(depth) + self.depth=max(self.depth, depth) + + # Start + try: + start = parseDate(items.pop(0)) + self.start = max (self.start, len(formatDate(start))) + except: + raise Exception ("Invalid Station line start date %s" % inputLine) + + # End + try: + end = parseDate(items.pop(0)) + except: + end=parseDate("") + pass + self.end = max (self.end, len(formatDate(end))) + + return [stationCode, start, end] + + def preload(self, filename, takeSugestions): + 
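+        """Pre-scan the tab file before conversion: collect the station codes
+        found in it and derive suggested network start/end dates from the
+        earliest start and latest end date seen on the station lines (an open
+        end is suggested if any line has no end date); the suggestions are
+        only applied when takeSugestions is set. Raises if a second file is
+        pre-loaded or if any station line cannot be parsed."""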
self.takeSugestions = takeSugestions + sugestedStart = datetime.now() + sugestedEnd = self.defaultEpoch + stationList = [] + + error = [] + + # Some initialization + if self.filename is not None: + raise Exception("Cannot pre-load two different files (current one is %s)" % self.filename) + + print("Analysing ... ", file=sys.stderr) + fd = open(filename) + for line in fd: + line = line.strip() + if not line or line[0] == "#": continue + + try: + (stationCode, start, end) = self.__analyseLine__(line.split()) + except Exception as e: + error.append(str(e)) + continue + + if not stationCode: continue + if stationCode not in stationList: + stationList.append(stationCode) + + sugestedStart = min(sugestedStart, start) + if end and sugestedEnd: + sugestedEnd = max(sugestedEnd, end) + else: + sugestedEnd = None + fd.close() + + if len(error): + raise Exception("\n".join(error)) + + print(" Loaded %d different stations" % len(stationList), file=sys.stderr) + if takeSugestions: + print(" Taking suggestion start date of %s " % formatDate(self.startDate), file=sys.stderr) + self.startDate = sugestedStart + print(" Taking suggestion end date of %s " % formatDate(self.endDate), file=sys.stderr) + self.endDate = sugestedEnd + + self.filename = filename + self.stationList = stationList + print("Done.", file=sys.stderr) + + def __convertHeader__(self, line, fdo): + + # Split line + items = line.split() + + if not self.takeSugestions: + if self.nat.hasStart: + print(" Using start from attribute.", file=sys.stderr) + self.startDate = self.nat.startDate + if self.nat.hasEnd: + print(" Using end from attribute.", file=sys.stderr) + self.endDate = self.nat.endDate + + nCode = items[2].strip() + if nCode != self.networkCode: + raise Exception("Wrong network code found: %s != %s" % (self.networkCode, nCode)) + + fdo.write("Nw: %s %s %s" % (nCode, formatDate(self.startDate), formatDate(self.endDate)) + "\n") + + self.nat.dump(fdo) + + def __convertLine__(self, line, fdo, atFront): + lnfmt = self.__fmtline__() + + # Split line + items = line.split() + + try: + code = items.pop(0) + except Exception as e: + raise Exception ("Missing Code on %s" % line) + + if code not in self.stationList: + raise Exception("Unknow station code $s" % code) + + try: + hummanStr(items.pop(0)) + except Exception as e: + raise Exception ("Missing Gain on %s" % line) + + try: + datalogger = items.pop(0) + except Exception as e: + raise Exception ("Missing Datalogger on %s" % line) + + try: + sensor = items.pop(0) + except Exception as e: + raise Exception ("Missing Sensor on %s" % line) + + try: + gaind = items.pop(0) + if float(gaind) != 1.0: + if not self.inst: + raise Exception("Instrument database needed to convert gain") + try: + dte = self.inst.dls[str(datalogger).split("%")[0]] + except Exception as e: + print(e, file=sys.stderr) + raise Exception("Datalogger %s not found" % str(datalogger).split("%")[0]) + datalogger += "%%%s" % (float(dte.gain) * float(gaind)) + print(" Converting gain multiplier to real gain using instrument DB on %s" % code, file=sys.stderr) + except Exception as e: + raise Exception ("Missing Gain on %s (%s)" % (line,str(e))) + + try: + channel = items.pop(0) + except Exception as e: + raise Exception ("Missing Channel on %s" % line) + + try: + latitude = items.pop(0) + except Exception as e: + raise Exception ("Missing Latitude on %s" % line) + + try: + longitude = items.pop(0) + except Exception as e: + raise Exception ("Missing Longitude on %s" % line) + try: + elevation = items.pop(0) + except Exception as 
e: + raise Exception ("Missing Elevation on %s" % line) + + try: + depth = items.pop(0) + except Exception as e: + raise Exception ("Missing Depth on %s" % line) + + #Orientation + try: + float(depth) + orientation = "ZNE" + except: + orientation = "Z" + (depth,a1,a2) = depth.split("/") + + a1n = float(a1) + if a1n == 0.0: + orientation+="1" + else: + orientation+="1(0.0,%s)"%a1 + + a2n = float(a2) + if a2n == 90.0: + orientation+="2" + else: + orientation+="2(0.0,%s)"%a2 + + # Start + try: + start = items.pop(0) + except Exception: + raise Exception ("Missing Start on %s" % line) + + try: + start = parseDate(start) + except Exception as e: + raise Exception("Invalide Start date: %s (%s) on %s" % (start, e, line)) + + #End + try: + end = items.pop(0) + except: + end = "" + + try: + end = parseDate(end) + except Exception as e: + raise Exception("Invalide End date: %s (%s) on %s" % (end, e, line)) + + [place, country] = self.sat.parseStationLine(line.split()) + description = "%s/%s" % (place, country) + + ## Prepare necessary output + if not atFront: + self.sma.dump(fdo, code) + self.sat.dump(fdo, code) + + for (start, end) in self.sma.getMappings(code, start, end): + fdo.write(lnfmt % (code, quote(description), datalogger, sensor, channel, orientation, latitude, longitude, elevation, depth, formatDate(start), formatDate(end)) + "\n") + + return code + + def convert(self, fdo, keepcomments = False, atFront = True): + if self.filename is None: + raise Exception("You should pre-load a tab file before before converting.") + + ## Obtain additional attribute classes if needed + if not self.nat: + self.nat = NetworkAttributes(self.networkCode, None) + if not self.sat: + self.sat = StationAttributes(self.networkCode, self.stationList, None) + if not self.sma: + self.sma = StationMappings(self.networkCode, self.stationList, None) + + # Parse in again the station lines and network header by the additional classes + print("Pre-Parsing Station/Network lines ... ", file=sys.stderr) + fd = open(self.filename) + for line in fd: + line = line.strip() + if not line or line[0] == "#": + continue + items = line.split() + if len(items) <= 5: + self.nat.parseNetworkLine(items) + elif len(items) <= 12: + self.sma.parseStationLine(items) + self.sat.parseStationLine(items) + fd.close() + + fd = open(self.filename) + oldcode="" # Station code of the last printed line + last="" # Type of the last printed line + print("Converting ... 
", file=sys.stderr) + for line in fd: + line = line.strip() + if not line or line[0] == "#": + if last == "l" or last == "a" or last == "h": fdo.write("\n") + if keepcomments: fdo.write(line + "\n") + last = "c" + continue + items = line.split() + if len(items) <= 5: + self.__convertHeader__(line, fdo) + last = "h" + if (atFront): + fdo.write("\n") + self.sma.dump(fdo, None) + self.sat.dump(fdo, None) + last = "a" + fdo.write("\n") + elif len(items) <= 12: + if (last == "l" and items[0].strip() != oldcode) or last == "h": fdo.write("\n") + oldcode = self.__convertLine__(line, fdo, atFront) + last = "l" + pass + else: + print("input at %s" % line, file=sys.stderr) + fd.close() + +def main(): + # Creating the parser + parser = OptionParser(usage="Old tab to New tab converter", version="1.0", add_help_option=True) + + parser.add_option("", "--instdb", type="string", + help="Indicates the instrument databases file to use", dest="inst", default=None) + parser.add_option("", "--smap", type="string", + help="Indicates the station attribute file to use", dest="smap", default=None) + parser.add_option("", "--sat", type="string", + help="Indicates the station attribute file to use", dest="sat", default=None) + parser.add_option("", "--nat", type="string", + help="Indicates the station attribute file to use", dest="nat", default=None) + parser.add_option("-t", "--tab", type="string", + help="Indicates the tab file to convert", dest="tabFile", default=None) + parser.add_option("-f", "--filterf", type="string", + help="Indicates a folder containing the filters coefficients files", dest="ffolder", default=None) + parser.add_option("-n", "--net", type="string", + help="Indicates a two leter station code", dest="netCode", default=None) + parser.add_option("-g", "--globalsa", action="store_true", + help="Indicate that we should put a condensed version of the station attributes just below the network definition", dest="globalSa", default=False) + parser.add_option("-a", "--autotime", action="store_true", + help="Guess the start and end times for a network from the channel times", dest="autoTime", default=False) + parser.add_option("-c", "--clean", action="store_true", + help="Remove the comments and blank lines", dest="cleanFile", default=False) + + # Parsing & Error check + (options, args) = parser.parse_args() + error = False + + if len(args) != 1: + print("need an Output Filename or '-' for stdout", file=sys.stderr) + error = True + + if not options.tabFile: + print("tab file name not supplied", file=sys.stderr) + error = True + + if options.inst and not options.ffolder: + print("Filter folder not supplied.", file=sys.stderr) + error = True + + if options.tabFile and not os.path.isfile(options.tabFile): + print("supplied tab file (%s) is not a file" % options.tabFile, file=sys.stderr) + error = True + + if not options.netCode: + print("network code not supplied", file=sys.stderr) + error = True + + #if options.autoTime and (options.netStart or options.netEnd): + # print >> sys.stderr, "options Auto Time and Network Start/End times are exclusive" + # return + + if error: + print("use -h for getting a help on usage", file=sys.stderr) + return + + if args[0] != "-": + fdo = open(args[0], "w") + else: + fdo = sys.stdout + + # Execution + try: + cnv = TabConverter(options.netCode.upper()) + cnv.preload(options.tabFile, options.autoTime) + + if options.inst or options.smap or options.nat or options.sat: + print("Loading optional files: ", file=sys.stderr) + + if options.inst and os.path.isfile(options.inst): + 
cnv.loadInstrumentsFile(options.inst, options.ffolder) + + if options.smap and os.path.isfile(options.smap): + cnv.loadStationMapping(options.smap) + + if options.nat and os.path.isfile(options.nat): + cnv.loadNetworkAttribute(options.nat) + + if options.sat and os.path.isfile(options.sat): + cnv.loadStationAttribute(options.sat) + print("Done.", file=sys.stderr) + + cnv.convert(fdo, not options.cleanFile, options.globalSa) + except Exception as e: + print("", file=sys.stderr) + print("Error on processing: %s" % e, file=sys.stderr) + + fdo.close() + +if __name__ == "__main__": + main() diff --git a/bin/tabinvmodifier b/bin/tabinvmodifier new file mode 100755 index 0000000..5278163 --- /dev/null +++ b/bin/tabinvmodifier @@ -0,0 +1,380 @@ +#!/usr/bin/env seiscomp-python + +################################################################################ +# Copyright (C) 2012-2013, 2020 Helmholtz-Zentrum Potsdam - Deutsches GeoForschungsZentrum GFZ +# +# tabinvmodifier -- Tool for inventory modification using nettab files. +# +# This software is free software and comes with ABSOLUTELY NO WARRANTY. +# +# Author: Marcelo Bianchi +# Email: mbianchi@gfz-potsdam.de +################################################################################ + +from __future__ import print_function +import os +import sys +import datetime, time +from nettab.lineType import Nw, Sa, Na, Ia +from nettab.basesc3 import sc3 +import seiscomp.datamodel, seiscomp.io, seiscomp.client, seiscomp.core, seiscomp.logging + +class Rules(object): + def __init__(self, relaxed = False): + self.relaxed = relaxed + self.attributes = {} + self.iattributes = [] + return + + @staticmethod + def _overlaps(pstart, pend, cstart, cend): + if pend: + if pend > cstart: + if not cend or pstart < cend: + return True + else: + if not cend or pstart < cend: + return True + return False + + def Nw(self, nw): + key = (nw.code, nw.start, nw.end) + if key in self.attributes: + raise Exception("Nw (%s/%s-%s) is already defined." 
% key) + self.attributes[key] = {} + self.attributes[key]["Sa"] = [] + self.attributes[key]["Na"] = [] + return key + + def Sa(self, key, sa): + try: + items = self.attributes[key]["Sa"] + except KeyError: + raise Exception ("Nw %s/%s-%s not found in Ruleset" % key) + items.append(sa) + + def Na(self, key, na): + try: + items = self.attributes[key]["Na"] + except KeyError: + raise Exception ("Nw %s/%s-%s not found in Ruleset" % key) + items.append(na) + + def Ia(self, ia): + self.iattributes.append(ia); + + def findKey(self, ncode, nstart, nend): + for (code, start, end) in self.attributes: + if code == ncode and self._overlaps(start, end, nstart, nend): + return (code, start, end) + return None + + def getInstrumentsAttributes(self, elementId, elementType): + att = {} + for item in self.iattributes: + if item.match(elementId, elementType): + att[item.Key] = item.Value + return att + + def getNetworkAttributes(self, key): + att = {} + for item in self.attributes[key]["Na"]: + att[item.Key] = item.Value + return att + + def getStationAttributes(self, key, ncode, scode, lcode, ccode, start, end): + att = {} + for item in self.attributes[key]["Sa"]: + if item.match(scode, lcode, ccode, start, end, self.relaxed): + att[item.Key] = item.Value + return att + +class InventoryModifier(seiscomp.client.Application): + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + self.setMessagingUsername("iModify") + + self.rules = None + self.relaxed = False + self.outputFile = None + + def _digest(self, tabFilename, rules = None): + if not tabFilename or not os.path.isfile(tabFilename): + raise Exception("Supplied filename is invalid.") + + if not rules: + rules = Rules(self.relaxed) + + try: + fd = open(tabFilename) + for line in fd: + obj = None + line = line.strip() + if not line or line[0] == "#": continue + if str(line).find(":") == -1: + raise Exception("Invalid line format '%s'" % line) + (Type, Content) = line.split(":",1) + + if Type == "Nw": + nw = Nw(Content) + key = rules.Nw(nw) + elif Type == "Sg": + raise Exception("Type not supported.") + elif Type == "Na": + na = Na(Content) + rules.Na(key, na) + elif Type == "Sa": + sa = Sa(Content) + rules.Sa(key, sa) + elif Type == "Sr": + raise Exception("Type not supported.") + elif Type == "Ia": + ia = Ia(Content) + rules.Ia(ia) + elif Type == "Se": + raise Exception("Type not supported.") + elif Type == "Dl": + raise Exception("Type not supported.") + elif Type == "Cl": + raise Exception("Type not supported.") + elif Type == "Ff": + raise Exception("Type not supported.") + elif Type == "If": + raise Exception("Type not supported.") + elif Type == "Pz": + raise Exception("Type not supported.") + except Exception as e: + raise e + + finally: + if fd: + fd.close() + return rules + + def validateParameters(self): + outputFile = None + rulesFile = None + + if self.commandline().hasOption("rules"): + rulesFile = self.commandline().optionString("rules") + + if self.commandline().hasOption("output"): + outputFile = self.commandline().optionString("output") + + if self.commandline().hasOption("relaxed"): + self.relaxed = True + + if self.commandline().hasOption("inventory-db") and outputFile is None: + print("Cannot send notifiers when loading inventory from file.", file=sys.stderr) + return False + + if self.commandline().unrecognizedOptions(): + print("Invalid options: ", end=' ', file=sys.stderr) + for i in self.commandline().unrecognizedOptions(): + print(i, end=' ', file=sys.stderr) + print("", file=sys.stderr) + 
return False + + if not rulesFile: + print("No rule file was supplied for processing", file=sys.stderr) + return False + + if not os.path.isfile(rulesFile): + argv0 = os.path.basename(self.arguments()[0]) + print("%s: %s: No such file or directory" % (argv0, rulesFile), file=sys.stderr) + return False + + if self.commandline().hasOption("inventory-db"): + self.setDatabaseEnabled(False, False) + self.setMessagingEnabled(False) + + self.rules = self._digest(rulesFile, self.rules) + self.outputFile = outputFile + return True + + def createCommandLineDescription(self): + seiscomp.client.Application.createCommandLineDescription(self) + + self.commandline().addGroup("Rules") + self.commandline().addStringOption("Rules", "rules,r", "Input XML filename") + self.commandline().addOption("Rules", "relaxed,e", "Relax rules for matching NSLC items") + + self.commandline().addGroup("Dump") + self.commandline().addStringOption("Dump", "output,o", "Output XML filename") + + def initConfiguration(self): + value = seiscomp.client.Application.initConfiguration(self) + self.setLoggingToStdErr(True) + self.setDatabaseEnabled(True, True) + self.setMessagingEnabled(True) + self.setLoadInventoryEnabled(True) + return value + + def send(self, *args): + while not self.connection().send(*args): + seiscomp.logging.warning("send failed, retrying") + time.sleep(1) + + def send_notifiers(self, group): + Nsize = seiscomp.datamodel.Notifier.Size() + + if Nsize > 0: + seiscomp.logging.info("trying to apply %d change%s" % (Nsize,"s" if Nsize != 1 else "" )) + else: + seiscomp.logging.info("no changes to apply") + return 0 + + Nmsg = seiscomp.datamodel.Notifier.GetMessage(True) + it = Nmsg.iter() + msg = seiscomp.datamodel.NotifierMessage() + + maxmsg = 100 + sent = 0 + mcount = 0 + + try: + try: + while it.get(): + msg.attach(seiscomp.datamodel.Notifier_Cast(it.get())) + mcount += 1 + if msg and mcount == maxmsg: + sent += mcount + seiscomp.logging.debug("sending message (%5.1f %%)" % (sent / float(Nsize) * 100.0)) + self.send(group, msg) + msg.clear() + mcount = 0 + next(it) + except: + pass + finally: + if msg.size(): + seiscomp.logging.debug("sending message (%5.1f %%)" % 100.0) + self.send(group, msg) + msg.clear() + seiscomp.logging.info("done") + return mcount + + @staticmethod + def _loop(obj, count): + return [ obj(i) for i in range(count) ] + + @staticmethod + def _collect(obj): + code = obj.code() + start = datetime.datetime.strptime(obj.start().toString("%Y %m %d %H %M %S"), "%Y %m %d %H %M %S") + try: + end = obj.end() + end = datetime.datetime.strptime(end.toString("%Y %m %d %H %M %S"), "%Y %m %d %H %M %S") + except: + end = None + return (code, start, end) + + @staticmethod + def _modifyInventory(mode, obj, att): + valid = sc3._findValidOnes(mode) + if not att: + return + + # Why repeat the code in basesc3.py (sc3::_fillSc3())? + # What about if there are existing comments/pids - won't + # this code get the count wrong?? *FIXME* + commentNum = 0 + for (k,p) in att.items(): + try: + if k == 'Comment': + # print('DEBUG: Adding comment', p) + if p.startswith('Grant'): + # 2020: These belong in DOI metadata, not here. 
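+                        # Grant acknowledgements are therefore skipped here and
+                        # not copied into the inventory as comments.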
+ continue + + c = seiscomp.datamodel.Comment() + c.setText(p) + c.setId(str(commentNum)) + commentNum += 1 + obj.add(c) + continue + + if k == 'Pid': + print('DEBUG: Adding Pid as comment', p) + c = seiscomp.datamodel.Comment() + (typ, val) = p.split(':', 1) + s = '{"type":"%s", "value":"%s"}' % (typ.upper(), val) + c.setText(s) + c.setId('FDSNXML:Identifier/' + str(commentNum)) + commentNum += 1 + obj.add(c) + continue + + p = valid['attributes'][k]['validator'](p) + getattr(obj, 'set'+k)(p) + except KeyError: + import string + hint = '' + if k[0] in string.lowercase: + hint = " (try '%s' instead)" % ( k[0].upper() + k[1:]) + print('Modifying %s: \'%s\' is not a valid key%s' % (mode, k, hint), file=sys.stderr) + obj.update() + return + + def run(self): + rules = self.rules + iv = seiscomp.client.Inventory.Instance().inventory() + + if not rules: + return False + + if not iv: + return False + + seiscomp.logging.debug("Loaded %d networks" % iv.networkCount()) + if self.outputFile is None: + seiscomp.datamodel.Notifier.Enable() + self.setInterpretNotifierEnabled(True) + + for net in self._loop(iv.network, iv.networkCount()): + (ncode, nstart, nend) = self._collect(net) + key = rules.findKey(ncode, nstart, nend) + if not key: continue + att = rules.getNetworkAttributes(key) + self._modifyInventory("network", net, att) + seiscomp.logging.info("%s %s" % (ncode, att)) + for sta in self._loop(net.station, net.stationCount()): + (scode, sstart, send) = self._collect(sta) + att = rules.getStationAttributes(key, ncode, scode, None, None, sstart, send) + self._modifyInventory("station", sta, att) + if att: seiscomp.logging.info(" %s %s" % (scode, att)) + for loc in self._loop(sta.sensorLocation, sta.sensorLocationCount()): + (lcode, lstart, lend) = self._collect(loc) + att = rules.getStationAttributes(key, ncode, scode, lcode, None, lstart, lend) + self._modifyInventory("location", loc, att) + if att: seiscomp.logging.info(" %s %s" % (lcode, att)) + for cha in self._loop(loc.stream, loc.streamCount()): + (ccode, cstart, cend) = self._collect(cha) + att = rules.getStationAttributes(key, ncode, scode, lcode, ccode, cstart, cend) + self._modifyInventory("channel", cha, att) + if att: seiscomp.logging.info(" %s %s" % (ccode, att)) + + for sensor in self._loop(iv.sensor, iv.sensorCount()): + att = rules.getInstrumentsAttributes(sensor.name(), "Se") + self._modifyInventory("sensor", sensor, att) + + for datalogger in self._loop(iv.datalogger, iv.dataloggerCount()): + att = rules.getInstrumentsAttributes(datalogger.name(), "Dl") + self._modifyInventory("datalogger", datalogger, att) + + return True + + def done(self): + if self.outputFile: + ar = seiscomp.io.XMLArchive() + ar.create(self.outputFile) + ar.setFormattedOutput(True) + ar.writeObject(seiscomp.client.Inventory.Instance().inventory()) + ar.close() + else: + self.send_notifiers("INVENTORY") + seiscomp.client.Application.done(self) + +if __name__ == "__main__": + app = InventoryModifier(len(sys.argv), sys.argv) + sys.exit(app()) diff --git a/bin/tau_remodl b/bin/tau_remodl new file mode 100755 index 0000000..6718f4d Binary files /dev/null and b/bin/tau_remodl differ diff --git a/bin/tau_setbrn b/bin/tau_setbrn new file mode 100755 index 0000000..3d4269a Binary files /dev/null and b/bin/tau_setbrn differ diff --git a/bin/timeout b/bin/timeout new file mode 100755 index 0000000..96fc477 Binary files /dev/null and b/bin/timeout differ diff --git a/bin/trylock b/bin/trylock new file mode 100755 index 0000000..c7c18a2 Binary files /dev/null and 
b/bin/trylock differ diff --git a/bin/waitlock b/bin/waitlock new file mode 100755 index 0000000..91c6673 Binary files /dev/null and b/bin/waitlock differ diff --git a/etc/defaults/fdsnws.cfg b/etc/defaults/fdsnws.cfg new file mode 100644 index 0000000..7a9220b --- /dev/null +++ b/etc/defaults/fdsnws.cfg @@ -0,0 +1,17 @@ +# Defines a list of modules loaded at startup. +plugins = ${plugins}, fdsnxml + +# SeisComP applications access waveform data through the RecordStream +# interface. Please consult the SeisComP documentation for a list of supported +# services and their configuration. +# This parameter configures the RecordStream URL, format: +# [service://]location[#type]. "service" is the name of the recordstream +# implementation. If "service" is not given "file://" is implied. +recordstream = sdsarchive://@ROOTDIR@/var/lib/archive + + +# Set the number of bytes to buffer for each chunk of waveform data served +# to the client. The lower the buffer the higher the overhead of Python Twisted. +# The higher the buffer the higher the memory usage per request. 100kB seems +# to be a good trade-off. +recordBulkSize = 102400 diff --git a/etc/defaults/gdrt_plugin.cfg b/etc/defaults/gdrt_plugin.cfg new file mode 100644 index 0000000..6a87685 --- /dev/null +++ b/etc/defaults/gdrt_plugin.cfg @@ -0,0 +1,5 @@ +# UDP port for receiving GDRT messages. By default port 9999 will be used. +plugins.gdrt.udpport = 9999 + +# Location of station list file. +plugins.gdrt.stationsFrom = stations.txt diff --git a/etc/defaults/global.cfg b/etc/defaults/global.cfg new file mode 100644 index 0000000..634bd2e --- /dev/null +++ b/etc/defaults/global.cfg @@ -0,0 +1,93 @@ +# Default plugins to load. Application specific configuration +# files should use the 'plugins' entry to specify additional +# plugins otherwise when using 'core.plugins' also these +# default values are going to be overwritten. +# +# To be able to read from all supported databases all available +# database plugins are loaded as 'core'. +# All currently supported db backends: dbmysql, dbpostgresql, dbsqlite3 +core.plugins = dbmysql + +# Use log level 2 (error and warning) +logging { + + level = 2 + + # Use logfiles. It is commented by default to allow applications to define + # console output with their hard coded defaults. If this setting is enabled + # it would otherwise always override the applications default logging + # backend. + #file = true + + # Rotate the logfiles + file { + rotator = true + + # Rotate each 86400 seconds (1 day) + rotator.timeSpan = 86400 + + # Keep 7 rotated log files + rotator.archiveSize = 7 + } + +} + +# Server connection +connection.server = localhost/production + +# The connection timeout +connection.timeout = 3 + +# How to transfer messages (binary, xml)? +connection.encoding = binary + +# Use slink (seedlink) as record source service. +recordstream = slink://localhost:18000 + +# The agencyID to use when tagging processing results +agencyID = GFZ + +# Organization name used mainly by ArcLink and SeedLink. +organization = Unset + +# Configures the default filters selectable in manual picker. +# The entry with a leading "@" is selected as default filter. 
+picker.filters = \ + "BP 0.1 - 1 Hz 3rd order;RMHP(10)>>ITAPER(30)>>BW(3,0.1,1)", \ + "BP 0.1 - 2 Hz 3rd order;RMHP(10)>>ITAPER(30)>>BW(3,0.1,2)", \ + "BP 0.4 - 1 Hz 3rd order;RMHP(10)>>ITAPER(30)>>BW(3,0.4,1)", \ + "@BP 0.7 - 2 Hz 3rd order;RMHP(10)>>ITAPER(30)>>BW(3,0.7,2)", \ + "BP 1 - 3 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,1.0,3)", \ + "BP 1 - 5 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,1.0,5)", \ + "BP 2 - 4 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,2.0,4)", \ + "BP 3 - 6 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,3.0,6)", \ + "BP 4 - 8 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,4.0,8)", \ + "HP 3 Hz 3rd order;RMHP(1)>>ITAPER(2)>>BW_HP(3,3)", \ + "BP 0.7 - 2 Hz + STA/LTA(1,50);RMHP(10)->ITAPER(30)->BW(3,0.7,2)->STALTA(1,50)" + +# Configure the columns of the event list that are visible initially. +# The first column containing the origin time is always visible and cannot +# be hidden. +# Possible values are: +# * Type +# * M +# * MType +# * Phases +# * Lat +# * Lon +# * Depth +# * Stat +# * Agency +# * Region +# * ID +eventlist.visibleColumns = M, MType, Phases, RMS, Lat, Lon, Depth, Stat, Agency, Region, ID + +# Default travel time table configuration. Plugins can be added to for custom +# travel time table implementations. +# This configuration can be used by applications that need to know which +# interfaces are activated and which tables they define. +ttt { + libtau.tables = iasp91, ak135 + LOCSAT.tables = iasp91, tab + homogeneous.tables = "" +} diff --git a/etc/defaults/ql2sc.cfg b/etc/defaults/ql2sc.cfg new file mode 100644 index 0000000..3555155 --- /dev/null +++ b/etc/defaults/ql2sc.cfg @@ -0,0 +1,22 @@ +# Send journals and event specific updates to the EVENT group. +connection.primaryGroup = EVENT + +# Receive objects from EVENT group. This is necessary to wait for event +# association of imported origins. +connection.subscriptions = EVENT + +# Number of seconds to fetch missed updates on start up. +backLog = 1800 + +# Number of public objects to cache. +cacheSize = 5000 + +# Maximum number of notifiers to batch in one message. If set to 0 no size +# limit is enforced. Make sure to not hit the overall message size limited of +# 16MiB which is enforced by the messaging system. +batchSize = 2000 + +# If event synchronisation is enabled and an incoming origin is not yet +# associated with an event on the target machine then this timeout defines +# the maximum number of seconds to wait for an association. +eventAssociationTimeout = 10 diff --git a/etc/defaults/scalert.cfg b/etc/defaults/scalert.cfg new file mode 100644 index 0000000..feb6d69 --- /dev/null +++ b/etc/defaults/scalert.cfg @@ -0,0 +1,3 @@ +# Defines a list of message groups to subscribe to. The default is usually +# given by the application and does not need to be changed. +connection.subscriptions = EVENT, LOCATION, MAGNITUDE diff --git a/etc/defaults/scamp.cfg b/etc/defaults/scamp.cfg new file mode 100644 index 0000000..b9a51d0 --- /dev/null +++ b/etc/defaults/scamp.cfg @@ -0,0 +1,18 @@ +# Send to the AMPLITUDE group +connection.primaryGroup = AMPLITUDE + +# Receive objects from PICK, AMPLITUDE and LOCATION group +connection.subscriptions = PICK, AMPLITUDE, LOCATION + +# The amplitudes to compute triggered by an incoming Origin +amplitudes = MLv, mb, mB, Mwp + +# The minimum arrival weight within an origin to compute amplitudes +# for the associated pick. +amptool.minimumPickWeight = 0.5 + +# Timeout in seconds of the first data packet of waveform data acquisition. 
+amptool.initialAcquisitionTimeout = 30 + +# Timeout in seconds of any subsequent data packet of waveform data acquisition. +amptool.runningAcquisitionTimeout = 2 diff --git a/etc/defaults/scardac.cfg b/etc/defaults/scardac.cfg new file mode 100644 index 0000000..6a769ee --- /dev/null +++ b/etc/defaults/scardac.cfg @@ -0,0 +1,6 @@ +archive = @ROOTDIR@/var/lib/archive +batchSize = 100 +threads = 1 +jitter = 0.5 +deepScan = false + diff --git a/etc/defaults/scautoloc.cfg b/etc/defaults/scautoloc.cfg new file mode 100644 index 0000000..241af1f --- /dev/null +++ b/etc/defaults/scautoloc.cfg @@ -0,0 +1,84 @@ +## Send to the LOCATION group +connection.primaryGroup = LOCATION + +## Receive objects from PICK and AMPLITUDE groups +connection.subscriptions = PICK, AMPLITUDE + +## max. permissible RMS for a location to be reported +#autoloc.maxRMS = 3.5 + +## max. individual residual (unweighted) for a pick to +## be used in location +#autoloc.maxResidual = 7.0 + +## Max. secondary azimuth gap for an origin to be reported by. +## Default is 360 degrees, i.e. no restriction based on this parameter. +#autoloc.maxSGAP = 360 + +## Arrivals with exceptionally large amplitudes may be +## flagged as XXL, allowing (in future) faster, preliminary +## "heads-up" alerts. +#autoloc.thresholdXXL = 10000. + +#autoloc.maxStationDistance = 180 +#autoloc.maxDistanceXXL = 10 +#autoloc.minPhaseCount = 6 +#autoloc.minPhaseCountXXL = 4 + +## If the station count for stations at < 105 degrees +## distance exceeds this number, no picks at > 105 degrees will be +## used in location. They will be loosely associated, though. +#autoloc.minStaCountIgnorePKP = 30 + +## Clean-up interval for removing old/unused objects, in seconds +## Don't change. +#autoloc.cleanupInterval = 3600 + +## max. age for objects kept in memory, in seconds +## Default is 6 hours - don't change. +#autoloc.maxAge = 21600 + +## Don't change. +#autoloc.wakeupInterval = 5 + +## Grid configuration +#autoloc.grid = @DATADIR@/scautoloc/grid.conf + +## Station configuration +#autoloc.stationConfig = @DATADIR@/scautoloc/station.conf + +## This is only relevant in offline/testing mode +#locator.stationLocations = @DATADIR@/scautoloc/station-locations.conf + +## Manual picks/origins can be fed back into autoloc for two purposes: +## * passive association to a solution from a "trusted" source so that we +## avoid fake or wrong locations due to events outside our area of interest +## * use the manual origins in further processing, especially the manual picks. +## Possibly also honor an operator specified fixed depth. +## Currently we only permit use of manual picks which are then used +## instead of the corresponding automatic picks (if existing) +# autoloc.useManualPicks = false + + +## Log all picks received by scautoloc to this file +autoloc.pickLog = @LOGDIR@/autoloc-picklog + +# Amplitude type to be used as SNR amplitude +# Don't change unless you know exactly what you are doing. +autoloc.amplTypeSNR = snr + +# Amplitude type to be used as absolute amplitude +# Don't change unless you know exactly what you are doing. +autoloc.amplTypeAbs = mb + +# Use manual origins from our own agency. Essentially it means to +# use manual picks from manual origins, which is assumed to be +# better than using only automatic picks. +autoloc.useManualOrigins = false +# NOTE: If you set the above to true, then make sure to add the +# LOCATION group to connection.subscriptions! 
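+# Purely illustrative (not part of these defaults): feeding manual origins
+# back into scautoloc would pair the two settings mentioned above, e.g.
+#   autoloc.useManualOrigins = true
+#   connection.subscriptions = PICK, AMPLITUDE, LOCATION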
+ +# If autoloc.useManualOrigins is true, adopt the depth from manual +# origins, which is especially important if it was fixed by the analyst. +autoloc.adoptManualDepth = false + diff --git a/etc/defaults/scautopick.cfg b/etc/defaults/scautopick.cfg new file mode 100644 index 0000000..8153462 --- /dev/null +++ b/etc/defaults/scautopick.cfg @@ -0,0 +1,87 @@ +# Send to the PICK group +connection.primaryGroup = PICK + +# Send amplitudes to this group +connection.amplitudeGroup = AMPLITUDE + +# Receive objects from CONFIG group +connection.subscriptions = CONFIG + +# The filter used to trigger +filter = "RMHP(10)>>ITAPER(30)>>BW(4,0.7,2)>>STALTA(2,80)" + +# The time correction applied to a detected pick +timeCorrection = -0.8 + +# The record ringbuffer size in seconds +ringBufferSize = 300 + +# The leadTime defines the time in seconds to +# start picking on the streams before current +# time +leadTime = 60 + +# The initTime defines a timespan in seconds +# for that the picker is blind after initialization +# This time is needed to initialize the filter and +# depends on it +initTime = 60 + +# Interpolate gaps linearly? This is valid for gaps +# short than thresholds.maxGapLength +gapInterpolation = false + +# For which value on a filtered stream is +# a pick detected +thresholds.triggerOn = 3 + +# The value the filtered stream must reach to +# enable detection again +thresholds.triggerOff = 1.5 + +# The maximum gap length to handle. Gaps larger +# than this size reset the picker +thresholds.maxGapLength = 4.5 + +# The timeWindow used to compute a maximum (snr) +# amplitude on the filtered stream +thresholds.amplMaxTimeWindow = 10 + +thresholds.deadTime = 30 +thresholds.minAmplOffset = 3 + +# The amplitudes to compute triggered by +# a new P Pick continuously without having +# an Origin +amplitudes = MLv, mb, mB + +# Configures the picker to use. By default only simple +# STALTA detections are emitted as picks. To enable "repicking" +# define a picker algorithm here. +picker = "" + +# Configures the secondary picker to be used. +spicker = "" + +# Configures the feature extraction type to be used +fx = "" + +# If enabled the all streams are used for picking that are received by the +# picker. This option has only effect if a file is used as input which contains +# more data than the picker requests or if amplitudes are enabled which are using +# the horizontal components. +useAllStreams = false + +# If enabled the all secondary pickers that were triggered by a previous pick +# will be terminated when a new detection or pick has been found. This aims to +# avoid the case where an S phase is wrongly picked as P but would also be +# picked as S by the secondary picker. But suppressing the S pick can lead to +# undesired results. It might be better in some situations to have two picks +# (P and S) instead only a wrong P. +killPendingSPickers = true + +# If enabled and a picker is configured then detections are sent as well. +# To distinguish between detections and picks the evaluation mode of the pick +# is set to manual. This is meant to be a debug option which can be used to +# compare detections and picks by their evaluation mode. 
+sendDetections = false diff --git a/etc/defaults/scesv.cfg b/etc/defaults/scesv.cfg new file mode 100644 index 0000000..5b7f150 --- /dev/null +++ b/etc/defaults/scesv.cfg @@ -0,0 +1,2 @@ +# Messaging subscriptions +connection.subscriptions = EVENT, MAGNITUDE, LOCATION, FOCMECH diff --git a/etc/defaults/scevent.cfg b/etc/defaults/scevent.cfg new file mode 100644 index 0000000..1d24959 --- /dev/null +++ b/etc/defaults/scevent.cfg @@ -0,0 +1,203 @@ +# Send to the EVENT group +connection.primaryGroup = EVENT + +# Receive objects from LOCATION, MAGNITUDE and FOCMECH group +connection.subscriptions = LOCATION, MAGNITUDE, FOCMECH, EVENT + + +# A magnitudes needs at least 4 stationmagnitudes +# to become preferred +eventAssociation.minimumMagnitudes = 4 + +# An automatic origin will be associated to an +# event when it has at least 10 phases +eventAssociation.minimumDefiningPhases = 10 + +# Minimum score of an automatic origin to be allowed to +# form an new Event. This requires an activated score +# plugin. See parameter score. +# If set the minimumDefiningPhases has no effect at as +# this check will be superseded by the score check. It is +# the task of the score processor to evaluate a proper +# score for all input origins. +# By default this option is deactivated. +#eventAssociation.minimumScore = 1 + +# An automatic origin will be associated to an +# event when it falls inside this region. +# Format: min-lat, min-lon, max-lat, max-lon +#eventAssociation.region.rect = -90,-180,90,180 + +# Search 1800 seconds BEFORE origin time of a +# new location for matching events +eventAssociation.eventTimeBefore = 1800 + +# Search 1800 seconds AFTER origin time of a +# new location for matching events +eventAssociation.eventTimeAfter = 1800 + +# An origin will be associated to an existing +# event when at least 3 picks matches with +# former associated origins +eventAssociation.minimumMatchingArrivals = 3 + +# If this time window in seconds is negative, pickIDs +# are compared to find matching arrivals. A non negative +# value (including 0) compares pick times regardless +# of the pickID. +# Pass: |pick1.time - pick2.time| <= threshold +eventAssociation.maximumMatchingArrivalTimeDiff = -1 + +# This parameter is only used in conjunction with +# eventAssociation.maximumMatchingArrivalTimeDiff. If a station +# has multiple associated arrivals for a particular event, this +# flag defines if the time distance of a new pick to all arrivals +# must be within eventAssociation.maximumMatchingArrivalTimeDiff +# or if one matching arrival is enough. +eventAssociation.compareAllArrivalTimes = true + +# Associates an origin with an existing event +# if the origin time differs not more +# than 60 seconds unless the minimumMatchingArrivals +# criteria matches. +eventAssociation.maximumTimeSpan = 60 + +# Associates an origin to an existing event +# when the location differs not more +# than 5 degrees unless the minimumMatchingArrivals +# criteria matches +eventAssociation.maximumDistance = 5 + +# Minimum number of station magnitudes required for Mw(mB) to be considered as +# preferred magnitude. +eventAssociation.minMwCount = 8 + +# If false then the station count rules out the magnitude priority +# which is only taken into account if two magnitudes have the +# same station count. +# +# If true then the priority rules out the station count +# which is only taken into account if two magnitudes have the +# same priority. 
+eventAssociation.magPriorityOverStationCount = false + +# Minimum number of station magnitudes which ensures that Mw(mB) will be +# preferred and not mb. +eventAssociation.mbOverMwCount = 30 + +# Average between mb and Mw(mB) which must be exceeded to become Mw(mB) +# preferred. +eventAssociation.mbOverMwValue = 6 + +# The magnitude type priority list +# Magnitudes with other types cannot become +# preferred magnitudes +eventAssociation.magTypes = M + +# The agencyID priority list +# When the eventtool comes to the point to select a preferred +# origin it orders all origins by its +# agency priority and selects then the best one among the +# highest priority agency. +# It also defines the agency priority for custom priority +# checks (eventAssociation.priorities) +#eventAssociation.agencies = GFZ + +# The author priority list +# When the eventtool comes to the point to select a preferred +# origin it orders all origins by its +# author priority and selects then the best one among the +# highest priority author. +# It also defines the author priority for custom priority +# checks (eventAssociation.priorities) +#eventAssociation.authors = scautoloc@localhost + +# The general priority list to decide if an origin becomes preferred. The +# priority decreases in the order of the parameters. This list is not used +# unless this parameter is activated. +# Empty priority list: scevent replicates the default hard wired behaviour: +# AGENCY, STATUS, PHASES_AUTOMATIC, TIME_AUTOMATIC +# Each item in the list corresponds to a check that is performed. Each check +# computes a score of the incoming origin (s1) and the current preferred origin +# (s2). If the s1 is lower than s2, the incoming origin is rejected and does +# not become preferred. All subsequent checks are ignored. If s1 is equal to +# s2, the next check in the list is performed. If s1 is larger than s2, the +# origin becomes preferred and all subsequent checks are ignored. +# Available tokens: +# AGENCY: check based on agency priorities +# AUTHOR: check based on author priorities +# MODE: evaluation mode priority: 0 = unset, 1 = automatic, 2 = manual, manual +# over-rules automatic +# STATUS: priority combined from evaluation status and evaluation mode: -100 = +# status is rejected, -1 = status is reported, 0 = status is preliminary or +# status is unset and mode is automatic, 1 = status is confirmed or status is +# unset and mode is manual, 2 = status is reviewed, 3 = status is final, +# METHOD: check based on the method priorities +# PHASES: higher phase count = higher priority +# PHASES_AUTOMATIC: only checks phase priorities for incoming automatic origins +# RMS: lower rms = higher priority +# RMS_AUTOMATIC: only check RMS on incoming automatic origins +# TIME: more recent origins (creationTime) have higher priorities +# TIME_AUTOMATIC: only check creationTime priority on incoming automatic +# origins +# SCORE: evaluates the score according to a configured ScoreProcessor and +# prefers the origin/focalmechanism with the highest score +#eventAssociation.priorities = AGENCY, STATUS, PHASES_AUTOMATIC, TIME_AUTOMATIC + +# If true, one magnitude will be preferred even if magnitude criteria are +# not fullfilled. +eventAssociation.enableFallbackMagnitude = false + +# The eventID prefix +# The eventID format is [prefix][year][code], e.g. gfz2008fdvg +eventIDPrefix = "gfz" + +# Defines the pattern to generate an event ID. 
+# %p : prefix +# %Y : year +# %[w]c: alpha character +# %[w]C: upper case alpha character +# %[w]d: decimal +# %[w]x: hexadecimal +# %[w]X: upper case hexadecimal +eventIDPattern = "%p%Y%04c" + +# Configures the number of event ID slots to look back and forth when an event +# ID is already taken. The default in previous versions was 5. Now -1 means +# that the margin is determined automatically based on +# "eventAssociation.eventTimeBefore" and "eventAssociation.eventTimeAfter". +# According to the configured "eventIDPattern" a fixed time range per slot can +# be computed and with that width the number of look ahead slots and look back +# slots can be computed based on the given time ranges for event association. +eventIDLookupMargin = -1 + +# Configures a timespan in seconds to delay origin association +#eventAssociation.delayTimeSpan = 0 + +# AgencyID filter used to delay origin association if +# eventAssociation.delayTimeSpan > 0 +#eventAssociation.delayFilter.agencyID = agency + +# Author filter used to delay origin association if +# eventAssociation.delayTimeSpan > 0 +#eventAssociation.delayFilter.author = author + +# evaluationMode filter used to delay origin association if +# eventAssociation.delayTimeSpan > 0. Allowed values are "manual" or "automatic" +#eventAssociation.delayFilter.evaluationMode = automatic + +# Defines whether to associate or to ignore origins derived from CMT/MT +# inversions. +eventAssociation.ignoreFMDerivedOrigins = true + +# If the preferred origin has evaluation status 'rejected' the event type will +# be set as 'not existing' unless the event type has been fixed by an operator +# or the preferred origin has been fixed. +eventAssociation.declareFakeEventForRejectedOrigin = false + +# Allows to match picks that are associated with weight 0 +eventAssociation.allowLooseAssociatedArrivals = false + +# If enabled then the EventDescription with type 'Flinn-Engdahl region' +# will be populated with the Flinn-Engdahl region name. +populateFERegion = false diff --git a/etc/defaults/scevtlog.cfg b/etc/defaults/scevtlog.cfg new file mode 100644 index 0000000..40df3f4 --- /dev/null +++ b/etc/defaults/scevtlog.cfg @@ -0,0 +1,13 @@ +connection.username = scevtlog +connection.subscriptions = EVENT, LOCATION, MAGNITUDE, PICK, AMPLITUDE + +# The output directory +directory = @LOGDIR@/events + +# The format to use to log events. +# Possible formats are: autoloc1, autoloc3 and xml. +# For compatibility reasons autoloc3 is the default +# format, but it is recommended to use xml, because +# it can be converted in autoloc1 and autoloc3 using +# scbulletin. +format = xml diff --git a/etc/defaults/scheli.cfg b/etc/defaults/scheli.cfg new file mode 100644 index 0000000..1425b1a --- /dev/null +++ b/etc/defaults/scheli.cfg @@ -0,0 +1,75 @@ +# List of stream codes to be plotted (net.sta.loc.cha). If not in capture mode +# only the first stream is shown. When using a list, the first entry is +# considered. Use commas for separating streams. +# Example: GR.MOX..BHZ +#heli.streams = GR.MOX..BHZ + +# Filter to be applied on the data. +#heli.filter = BW(3,0.7,2.0) + +# Filter to be applied on the data. +heli.numberOfRows = 48 + +# Length of data per trace. +heli.rowTimeSpan = 1800 + +# The time format used to print the start and end time of the whole plot (upper +# right corner). The format specification is the one used in the strftime +# function (man strftime). +heli.timeFormat = %F + +# Sets current time to last data sample +heli.recordTime = false + +# Line width of traces. 
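+# As an overall illustration of the options above (values assumed,
+# not defaults):
+# heli.streams = GR.MOX..BHZ
+# heli.rowTimeSpan = 3600
+# heli.numberOfRows = 24
+# would cover one day of data at one hour per row; the line width of
+# the plotted traces is set below.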
+heli.lineWidth = 1 + +# A list of alternating row colors cycled through for painting traces. +heli.colors = FF0000, 0000FF + +# Use anti aliasing to plot the traces. The default uses the settings from +# scheme.records.antiAliasing +heli.antialiasing = false + +# Add stream description to traces. +heli.stream.description = true + +# Define the method to scale traces within rows. Possible values are: +# minmax: Scale all rows to configured minimum and maximum amplitudes +# configured by amplitudeRange.min and amplitudeRange.max. +# row: Scale each row to the maximum within this row. +heli.amplitudeRange.scaling = minmax + +# Minimum amplitude to show in trace. Requires amplitudeRange.scale = "minmax". +heli.amplitudeRange.min = -0.00001 + +# Minimum amplitude to show in trace. Requires amplitudeRange.scale = "minmax". +heli.amplitudeRange.max = +0.00001 + +# Image creation interval. Negative values disable image dumping. If enabled, +# images are generated at the configured interval. +heli.dump.interval = 0 + +# Name of output file. The filename can contain placeholders that are replaced +# by the corresponding streamID parts: +# %N : network code +# %S : station code +# %L : location code +# %C : channel code +# Placeholders are important if more than one stream is given and capture mode +# is active. +heli.dump.outputFile = /tmp/heli_%N_%S_%L_%C.png + +# Image resolution +heli.dump.dpi = 300 + +# Number of pixels horizontally +heli.dump.xres = 1024 + +# Number of pixels vertically +heli.dump.yres = 768 + +# Defines the path to a script that is called whenever an image has been +# captured and written to disc. The only parameter is the path to the generated +# image. +scripts.postprocessing = "" diff --git a/etc/defaults/scm.cfg b/etc/defaults/scm.cfg new file mode 100644 index 0000000..f6d53d5 --- /dev/null +++ b/etc/defaults/scm.cfg @@ -0,0 +1 @@ +plugins = ${plugins}, mncursesplugin diff --git a/etc/defaults/scmag.cfg b/etc/defaults/scmag.cfg new file mode 100644 index 0000000..bad2eea --- /dev/null +++ b/etc/defaults/scmag.cfg @@ -0,0 +1,55 @@ +# Send to the MAGNITUDE group +connection.primaryGroup = MAGNITUDE + +# Receive objects from PICK, AMPLITUDE and LOCATION group +connection.subscriptions = PICK, AMPLITUDE, LOCATION + +# Interval between 2 sending processes. The interval has influence how often +# information is updated. +sendInterval = 1 + +# The minimum weight of an arrival to be used for magnitude +# calculations. +minimumArrivalWeight = 0.5 + +# Defines the types of magnitudes to calculate. +# A magnitude of a given type is going to be calculated +# only when a corresponding amplitude exists. Check +# the amplitudes calculated by scautopick and scamp also. +magnitudes = MLv, mb, mB, Mwp + +# Defines the average method to use when computing +# the network magnitude. To define the average method +# per magnitude type append the type, eg: +# magnitudes.average = default, MLv:median +# The default behaviour is to compute the mean if less +# than 4 contributed station magnitudes exist otherwise +# a trimmed mean of 25 percent is used. +magnitudes.average = default + +# Enable/disable calculation of a summary magnitude +summaryMagnitude.enabled = true + +# This is the minimum station magnitude required for any +# magnitude to contribute to the summary magnitude at all. If +# this is set to 4 then no magnitude with less than 4 station +# magnitudes is taken into consideration even if this results +# in no summary magnitude at all. 
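+# For instance (illustrative), with a value of 4 an MLv network
+# magnitude derived from only 3 station magnitudes would be ignored
+# entirely when forming the summary magnitude.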
For this reason, the +# default here is 1 but in a purely automatic system it should +# be higher, at least 4 is recommended. +summaryMagnitude.minStationCount = 1 + +# Define the type of the summary magnitude +summaryMagnitude.type = M + +# Define the coefficients to calculate the weight +# of a magnitude: +# weight = a*magStationCount+b +# Unnamed values define the default values +summaryMagnitude.coefficients.a = 0, Mw(mB):0.4, Mw(Mwp):0.4 +summaryMagnitude.coefficients.b = 1, MLv:2, Mw(mB):-1, Mw(Mwp):-1 + +# Define the magnitudes to include into the the summary magnitude +# calculation +#summaryMagnitude.whitelist = "" +summaryMagnitude.blacklist = mB, Mwp diff --git a/etc/defaults/scmaster.cfg b/etc/defaults/scmaster.cfg new file mode 100644 index 0000000..248e233 --- /dev/null +++ b/etc/defaults/scmaster.cfg @@ -0,0 +1,100 @@ +# The available queues. +queues = production, playback + +# The default group set +defaultGroups = AMPLITUDE, \ + PICK, \ + LOCATION, \ + MAGNITUDE, \ + FOCMECH, \ + EVENT, \ + QC, \ + PUBLICATION, \ + GUI, \ + INVENTORY, \ + ROUTING, \ + CONFIG, \ + LOGGING, \ + IMPORT_GROUP, \ + SERVICE_REQUEST, \ + SERVICE_PROVIDE + +interface { + bind = 0.0.0.0:18180 + + # List of IP masks which are allowed to access + #acl = 0.0.0.0/0 + + ssl { + bind = 0.0.0.0:-1 # Disabled by default + + # List of IP masks which are allowed to access + #acl = 0.0.0.0/0 + + # The private server key. This key will not be shared with clients and + # must be kept secret. + key = @CONFIGDIR@/ssl/key.pem + + # The server certificate shared with clients. + certificate = @CONFIGDIR@/ssl/cert.pem + } +} + + +queues { + production { + # Grant access to all connections + acl = 0.0.0.0/0 + + # The plugins loaded and executed for this particular queue. + # The execution order is exactly the same order as given here. + plugins = dbstore + + processors { + messages = dbstore + + # Configure the dbstore processor + messages { + dbstore { + # Select the database driver. This all depends on the + # loaded plugins. + driver = mysql + + # Defines the read connection sent to the clients + read = sysop:sysop@localhost/seiscomp + + # Defines the write connection for the plugin. This line + # will not be published and only be used internally. + write = sysop:sysop@localhost/seiscomp + + # If enabled, the plugin will check the database schema + # version and refuse to start if the version doesn't match + # the latest version. If disabled and the an object needs + # to be stored which is incompatible with the database + # schema this object is lost. Leave this option enabled + # unless you know exactly what are you doing and what + # the consequences are. + strictVersionMatch = true + } + } + } + } + + playback { + # Grant access only to localhost + acl = 127.0.0.1 + } +} + + +http { + # The directory served by the http server at staticPath + filebase = @DATADIR@/scmaster/http/ + + # The URL path at which html files and assets are available. + # All files under filebase will be served at this URL path. + staticPath = / + + # The URL path at which the broker websocket is available. + brokerPath = / +} diff --git a/etc/defaults/scmv.cfg b/etc/defaults/scmv.cfg new file mode 100644 index 0000000..dd43354 --- /dev/null +++ b/etc/defaults/scmv.cfg @@ -0,0 +1,13 @@ +# Enable maps legends +scheme.map.showLegends = true + +# Messaging subscriptions +connection.subscriptions = AMPLITUDE, PICK, EVENT, LOCATION, MAGNITUDE, QC, CONFIG + +# Sets the location of the map symbol legend (QC, ground motion). 
+# Use either: topleft, topright, bottomright or bottomleft. +mapLegendPosition = topleft + +# Sets the location of the event symbol legend. Use either: +# topleft, topright, bottomright or bottomleft. +eventLegendPosition = bottomleft diff --git a/etc/defaults/scolv.cfg b/etc/defaults/scolv.cfg new file mode 100644 index 0000000..4f3994f --- /dev/null +++ b/etc/defaults/scolv.cfg @@ -0,0 +1,69 @@ +# Messaging subscriptions +connection.subscriptions = EVENT, LOCATION, FOCMECH, MAGNITUDE, PICK, CONFIG, GUI + +# Load initially events of 1 day from +# database +loadEventDB = 1.0 + +# Define favourite pick phases +# These phases go into the "Picking" menu as +# top-level items +picker.phases.favourites = P, Pn, Pg, pP, S, Sg, sP + +# Defines the phases (additionally to the pick phases) +# for which theoretical arrival times are computed and +# which are plotted into the trace +picker.showPhases = P, Pn, Pg, pP, sP, S, Sg + +# When loading the traces of an event (origin) are +# all picks (not only the associated ones) going to be +# loaded within that timewindow from the database +picker.loadAllPicks = false + +# Load all components (Z,N,E) when opening the picker +# per default +picker.loadAllComponents = false + +# Magnitudes to compute manually by default +magnitudes = MLv, mb, mB, Mwp + +# A list of magnitude types to be displayed in the summary widget (F8). +visibleMagnitudes = M, ML, MLv, mb, mB, Mwp, Mjma, Ms_20, Ms(BB) + +# Default visible column set of arrival table. The +# order of the table columns is fixed and will not +# reflect the order given here. +# Possible values are: +# * Used +# * Status +# * Phase +# * Weight +# * Method +# * Polarity +# * Net +# * Sta +# * Loc/Cha +# * Timeres +# * Dis +# * Az +# * Time +# * +/- +# * Slo +# * Slores +# * Baz +# * Bazres +# * Created +# * Latency +olv.arrivalTable.visibleColumns = Used, Status, Phase, Net, Sta, Loc/Cha, Timeres, Dis, Az, Time, +/- + +# enable/disable advanced options (magnitude parameters) for artificial +# origin creations +olv.artificialOriginAdvanced = false + +# If a locator does not populate the take off angle in its arrivals +# the first motion plot will not show picked polarities. This option +# defines whether to compute take off angles that are not present +# in the arrivals or not. +olv.computeMissingTakeOffAngles = true + +olv.systemTray = true diff --git a/etc/defaults/scqc.cfg b/etc/defaults/scqc.cfg new file mode 100644 index 0000000..2115f62 --- /dev/null +++ b/etc/defaults/scqc.cfg @@ -0,0 +1,119 @@ +# +# *** QcTool default configuration file *** +# +# Place a copy with your own modifications +# in ~/.seiscomp +# + +# Send to the QC group +connection.primaryGroup = QC + + +# Receive objects from CONFIG group +connection.subscriptions = CONFIG + + +# ID of the creator +CreatorId="smi://de.gfz-potsdam/QcTool_0.3.1" + + +# use only configured streams (trunk/key/station_*) (z-component) (True/False) +# --> the same streams as e.g. scautopick works on +useConfiguredStreams = true + +# If useConfiguredStreams is true then this parameter decides whether to use +# only the vertical component (default) or all three components. +# The 3 components are collected from the inventory according to channel +# orientation. If that is not possible then the fixed components N and E will +# be used. +use3Components = false + +# if useConfiguredStreams == False then +# load (from inventory) only those streams, matching the streamMask +# RegEx e.g. "^(NET1|NET2)\.(STA1|STA2|STA3)\.(LOC)\.((BH)|(LH)|(HH))Z$" +# RegEx e.g. 
"^(.+)\.(.+)\.(.*)\.(.+)Z$" +streamMask = "^(.+)\.(.+)\.(.*)\.(BHZ)$" + + +# Database look up for past entries not older than x days +# (to determine the last QC parameter calculated) +# [days] +dbLookBack = 7 + + +# currently implemented QcPlugins: +# QcDelay, QcLatency, QcTiming, QcRms, QcOffset, QcGap, QcSpike, QcOutage +# +# Load this plugins for calculating Qc Parameters +plugins = qcplugin_delay, \ + qcplugin_latency, \ + qcplugin_timing, \ + qcplugin_rms, \ + qcplugin_offset, \ + qcplugin_gap, \ + qcplugin_overlap, \ + qcplugin_availability, \ + qcplugin_spike, \ + qcplugin_outage + + +# QcPlugin DEFAULT configuration +# +# Use this plugin only for realtime processing [True]. +# Default [False] means, plugin is able to +# process archived data AND realtime data streams. +plugins.default.realTimeOnly = False +# +# Qc-Buffer Length [s] (aka LTA-Buffer) +# Must be >= plugins.*.bufferLength +plugins.default.buffer = 4000 +# +# A R C H I V E +# Interval for sending archive messages [s] +# ... which will finally end up in the database +# so be carefull, not setting it too small!!! +# If set to -1, nothing gets written into the database +plugins.default.archive.interval = -1 +# Archive Buffer length [s] +plugins.default.archive.buffer = 3600 +# +# R E P O R T +# Interval for sending report messages [s] +# ... which e.g. may be displayed by scqcv (QcView) +plugins.default.report.interval = 60 +# Report Buffer length [s] +plugins.default.report.buffer = 600 +# Report messages are generated in case of no data is received since timeout seconds [s] +# (only in realtime processing mode) +plugins.default.report.timeout = 0 +# +# A L E R T +# (only available in realtime processing mode) +# !!! STILL EXPERIMENTAL !!! +# Interval for checking alert thresholds [s] +# A value of -1 disables threshold checking. +plugins.default.alert.interval = -1 +# Alert Buffer length [s] (aka STA-Buffer) +plugins.default.alert.buffer = 1800 +# Alert threshold in percent [%], single value. [list: 25,50,75 ... not yet implemented] +plugins.default.alert.thresholds = 150 + + +# QcPlugin SPECIFIC configuration +plugins.QcLatency.report.buffer = 60 +plugins.QcLatency.report.timeout = 60 +plugins.QcLatency.realTimeOnly = True +# +plugins.QcDelay.report.buffer = 60 +plugins.QcDelay.report.timeout = 60 +plugins.QcDelay.realTimeOnly = True +# +plugins.QcAvailability.report.timeout = 60 +# +plugins.QcRms.report.buffer = 3600 +plugins.QcRms.report.timeout = 60 +plugins.QcRms.realTimeOnly = True +# +# If there is a gap of more than x [s], +# write an OUTAGE entry into the database. +plugins.QcOutage.notifyDB = 1800 diff --git a/etc/defaults/scqcv.cfg b/etc/defaults/scqcv.cfg new file mode 100644 index 0000000..a9d9c93 --- /dev/null +++ b/etc/defaults/scqcv.cfg @@ -0,0 +1,448 @@ +# Defines the primary group of a module. This is the name of the group where a +# module sends its messages to if the target group is not explicitely given in +# the send call. +connection.primaryGroup = QC + +# Defines a list of message groups to subscribe to. The default is usually +# given by the application and does not need to be changed. +connection.subscriptions = QC, CONFIG + +# List of QC paramters to be displayed in the details table. Read the scqc +# documentation for a list of available QC parameters and the default +# configuration of scqcv $SEISCOMP_ROOT/etc/defaults/scqcv.cfg for more +# examples. 
+# Format: "Parameter name : ConfigName" +# Example: "delay : delay","spikes count : spike","spikes amplitude : +# spikeAmplitude" Refer to the parameters by their ConfigName to configure the +# attriutes. +parameter = "latency : latency",\ + "delay : delay",\ + "timing quality : timing",\ + "offset : offset",\ + "rms : rms",\ + "gaps count : gap",\ + "overlaps count : overlap",\ + "availability : availability",\ + "spikes count : spike" +# "gaps interval : gapInterval",\ +# "gaps length : gapLength",\ +# "spikes interval : spikeInterval",\ +# "spikes amplitude : spikeAmplitude" +# "overlaps interval : overlapInterval",\ +# "overlaps length : overlapLength" + +# List of channels to display. By default the global binding configuration is +# used which can be overwritten with this parameter. +streams.codes = default + +# Add new streams automatically to the streams configured in streams.codes when +# waveform QC parameters are provided for these streams. +streams.cumulative = false + +# Length of data to be displayed. +streamWidget.length = 600 + +# Names of range profile to be considered. The range profiles define the +# background color table fields depending on the field value. Add the default +# ranges for which different intervals and the color are configured. +default.ranges = sane, bad + +# Values are: int, float, percent, timeSpan +# Displays raw values if unset. +default.format = float + +# Default time in seconds, how long a value is displayed in scqcv if no update +# is received. Setting to 0 means, show value until updated. +default.expire = 0 + +# A color defined by the color definitions below. +default.color = grey1 + +# Activate to display absolute values (modulus). +default.useAbsoluteValue = false + +# Values are: int, float, percent, timeSpan +# Displays raw values if unset. +availability.format = percent + +# Default time in seconds, how long a value is displayed in scqcv if no update +# is received. Setting to 0 means, show value until updated. +availability.expire = 600 + +# Value interval for range sane +availability.range.sane = 99.0, 101.0 + +# Contribution of this range for computing the score. The range must be +# considered. +availability.range.sane.count = 0 + +# Names of range profile to be considered The range profiles s define the +# background color table fields depending on the field value. Add the default +# ranges for which different intervals and the color are configured. +delay.ranges = sane, inter, bad + +# Values are: int, float, percent, timeSpan +# Displays raw values if unset. +delay.format = timeSpan + +# Value interval for range sane +delay.range.sane = 0.0, 60.0 + +# Value interval for range inter +delay.range.inter = 60, 120 + +# Contribution of this range for computing the score. The range must be +# considered. +delay.range.bad.count = -500 + +# A color defined by the color definitions below. +delay.range.bad.color = red + +# Contribution of this range for computing the score. The range must be +# considered. +delay.range.inter.count = -1 + +# A color defined by the color definitions below. +delay.range.inter.color = yellow + +# Contribution of this range for computing the score. The range must be +# considered. +delay.range.sane.count = 0 + +# A color defined by the color definitions below. +delay.range.sane.color = green + +# Default time in seconds, how long a value is displayed in scqcv if no update +# is received. Setting to 0 means, show value until updated. 
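+# With the value below, for example, a gap count is cleared from the
+# display 10 minutes (600 s) after the last update was received
+# (illustrative reading of the default).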
+gap.expire = 600 + +# Value interval for range sane +gap.range.sane = 0.0, 0.0 + +# Contribution of this range for computing the score. The range must be +# considered. +gap.range.sane.count = 0 + +# Values are: int, float, percent, timeSpan +# Displays raw values if unset. +latency.format = timeSpan + +# Value interval for range sane +latency.range.sane = 0.0, 600.0 + +# Contribution of this range for computing the score. The range must be +# considered. +latency.range.bad.count = -1000 + +# A color defined by the color definitions below. +latency.range.bad.color = grey + +# Contribution of this range for computing the score. The range must be +# considered. +latency.range.sane.count = 0 + +# Activate to display absolute values (modulus). +offset.useAbsoluteValue = true + +# Value interval for range sane +offset.range.sane = 0.0, 500.E3 + +# Contribution of this range for computing the score. The range must be +# considered. +offset.range.sane.count = 0 + +# Value interval for range sane +rms.range.sane = 10.0, 20.E3 + +# Contribution of this range for computing the score. The range must be +# considered. +rms.range.sane.count = 0 + +# A color defined by the color definitions below. +rms.range.sane.color = green + +# Default time in seconds, how long a value is displayed in scqcv if no update +# is received. Setting to 0 means, show value until updated. +spike.expire = 600 + +# Value interval for range sane +spike.range.sane = 0.0, 0.0 + +# Contribution of this range for computing the score. The range must be +# considered. +spike.range.sane.count = 0 + +# Names of range profile to be considered The range profiles s define the +# background color table fields depending on the field value. Add the default +# ranges for which different intervals and the color are configured. +timing.ranges = sane, inter, bad + +# Values are: int, float, percent, timeSpan +# Displays raw values if unset. +timing.format = int + +# Activate to display absolute values (modulus). +timing.useAbsoluteValue = false + +# Value interval for range sane +timing.range.sane = 90.0, 100.0 + +# Value interval for range inter +timing.range.inter = 50.0, 90.0 + +# Contribution of this range for computing the score. The range must be +# considered. +timing.range.bad.count = -100 + +# A color defined by the color definitions below. +timing.range.bad.color = darkred + +# Contribution of this range for computing the score. The range must be +# considered. +timing.range.inter.count = -1 + +# A color defined by the color definitions below. +timing.range.inter.color = yellow + +# Contribution of this range for computing the score. The range must be +# considered. +timing.range.sane.count = 0 + +# A color defined by the color definitions below. +timing.range.sane.color = green + +# +range.bad = -99.9E99, 99.0E99 + +# Contribution of this range for computing the score. The range must be +# considered. +range.bad.count = -1 + +# A color defined by the color definitions below. +range.bad.color = red + +# Contribution of this range for computing the score. The range must be +# considered. +range.inter.count = 0 + +# A color defined by the color definitions below. +range.inter.color = yellow + +# Value interval for range sane +range.sane = 0.0, 99.0E99 +# Contribution of this range for computing the score. The range must be +# considered. +range.sane.count = 0 + +# A color defined by the color definitions below. 
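+# Worked example based on the values above (illustrative): a timing
+# quality of 70 falls into timing.range.inter (50.0, 90.0), is drawn
+# with the yellow background and contributes -1 to the stream score.
+# The default color of the generic sane range is set next.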
+range.sane.color = green + +############################################################################### +# score values for overview window +# Ranges to be considered for forming the score. Set the count parameter from the +# respective range section for controlling the contribution of this range. +score.default.ranges = latency,\ + delay,\ + timing,\ + below,\ + r10,\ + r09,\ + r08,\ + r07,\ + r06,\ + r05,\ + r04,\ + r03,\ + r02,\ + r01,\ + above,\ + unset + +# +range.above = 0, 999 + +# A color defined by the color definitions below. +range.above.color = green + +# Possible values: enableStream, disableStream +range.above.action = enableStream + +# +range.below = -99, -11 + +# Contribution of this range for computing the score. The range must be +# considered. +range.below.count = 0 + +# A color defined by the color definitions below. +range.below.color = grey + +# Possible values: enableStream, disableStream +range.below.action = disableStream + +# +range.timing = -200, -100 + +# Contribution of this range for computing the score. The range must be +# considered. +range.timing.count = 0 + +# A color defined by the color definitions below. +range.timing.color = darkred + +# +range.delay = -600, -500 + +# Contribution of this range for computing the score. The range must be +# considered. +range.delay.count = 0 + +# A color defined by the color definitions below. +range.delay.color = darkred + +# define cumulative status ranges +range.latency = -1900, -1000 + +# Contribution of this range for computing the score. The range must be +# considered. +range.latency.count = 0 + +# A color defined by the color definitions below. +range.latency.color = grey + +# +range.r01 = -1, -1 + +# Contribution of this range for computing the score. The range must be +# considered. +range.r01.count = 0 + +# A color defined by the color definitions below. +range.r01.color = red01 + +# +range.r02 = -2, -2 + +# Contribution of this range for computing the score. The range must be +# considered. +range.r02.count = 0 + +# A color defined by the color definitions below. +range.r02.color = red02 + +# +range.r03 = -3, -3 + +# Contribution of this range for computing the score. The range must be +# considered. +range.r03.count = 0 + +# A color defined by the color definitions below. +range.r03.color = red03 + +# +range.r04 = -4, -4 + +# Contribution of this range for computing the score. The range must be +# considered. +range.r04.count = 0 + +# A color defined by the color definitions below. +range.r04.color = red04 + +# +range.r05 = -5, -5 + +# Contribution of this range for computing the score. The range must be +# considered. +range.r05.count = 0 + +# A color defined by the color definitions below. +range.r05.color = red05 + +# +range.r06 = -6, -6 + +# Contribution of this range for computing the score. The range must be +# considered. +range.r06.count = 0 + +# A color defined by the color definitions below. +range.r06.color = red06 + +# +range.r07 = -7, -7 + +# Contribution of this range for computing the score. The range must be +# considered. +range.r07.count = 0 + +# A color defined by the color definitions below. +range.r07.color = red07 + +# +range.r08 = -8, -8 + +# Contribution of this range for computing the score. The range must be +# considered. +range.r08.count = 0 + +# A color defined by the color definitions below. +range.r08.color = red08 + +# +range.r09 = -9, -9 + +# Contribution of this range for computing the score. The range must be +# considered. 
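+# Worked example (illustrative): with delay.range.bad.count = -500 as
+# configured further above, a stream stuck in the bad delay state
+# accumulates a score of -500, which falls into range.delay
+# (-600, -500) and is therefore painted darkred in the overview.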
+range.r09.count = 0 + +# A color defined by the color definitions below. +range.r09.color = red09 + +# +range.r10 = -10, -10 + +# Contribution of this range for computing the score. The range must be +# considered. +range.r10.count = 0 + +# A color defined by the color definitions below. +range.r10.color = red10 + +# +range.unset = 1000, 1000 + +# Contribution of this range for computing the score. The range must be +# considered. +range.unset.count = 0 + +# A color defined by the color definitions below. +range.unset.color = grey1 + + +# R, G, B, alpha colors +color.red = 255, 000, 000, 64 +color.yellow = 255, 255, 000, 64 +color.green = 000, 255, 000, 100 +color.blue = 000, 000, 255, 100 +color.grey = 130, 130, 130, 255 +color.grey1 = 240, 240, 240, 255 +color.darkred = 255, 000, 000, 196 +# +color.red10 = 255, 000, 000, 255 +color.red09 = 255, 000, 000, 240 +color.red08 = 255, 000, 000, 220 +color.red07 = 255, 000, 000, 200 +color.red06 = 255, 000, 000, 175 +color.red05 = 255, 000, 000, 150 +color.red04 = 255, 000, 000, 125 +color.red03 = 255, 000, 000, 100 +color.red02 = 255, 000, 000, 85 +color.red01 = 255, 000, 000, 70 + +# valid formats are: +format.float = 2 +format.timeSpan = "" +format.int = "" +format.percent = "" diff --git a/etc/defaults/screloc.cfg b/etc/defaults/screloc.cfg new file mode 100644 index 0000000..8fbb00f --- /dev/null +++ b/etc/defaults/screloc.cfg @@ -0,0 +1,8 @@ +# Defines the primary group of a module. This is the name of the group where a +# module sends its messages to if the target group is not explicitely given in +# the send call. +connection.primaryGroup = LOCATION + +# Defines a list of message groups to subscribe to. The default is usually +# given by the application and does not need to be changed. +connection.subscriptions = PICK, LOCATION diff --git a/etc/defaults/scrttv.cfg b/etc/defaults/scrttv.cfg new file mode 100644 index 0000000..c123137 --- /dev/null +++ b/etc/defaults/scrttv.cfg @@ -0,0 +1,67 @@ +# Defines the primary group of a module. This is the name of the group where a +# module sends its messages to if the target group is not explicitely given in +# the send call. +connection.primaryGroup = GUI + +# Defines a list of message groups to subscribe to. The default is usually +# given by the application and does not need to be changed. +connection.subscriptions = PICK, EVENT, LOCATION, GUI, CONFIG + +# If greater than 0 then all traces for which the data latency is higher than +# this value are hidden. +maxDelay = 0 + +# If enabled then all traces are sorted by distance when a new origin arrives. +resortAutomatically = true + +# If enabled, picks are shown. +showPicks = true + +# Defines the filter to be used when filtering is activated. This is only being +# used if filters is not set otherwise it is overridden. This option is mainly +# for backward compatibility. +filter = "RMHP(2)>>ITAPER(5)>>BW(3, 0.5, 8.0)" + +# Activates the first filter of the configured filter list after startup. This +# is equivalent to pressing 'f'. +autoApplyFilter = false + +# Defines the buffer size in seconds of the ring bu of each trace. +bufferSize = 1800 + +# If set to true all traces will be visible on application startup independent +# of data availability. +allTracesInitiallyVisible = false + +# Time span in seconds to switch back to the last view after an origin caused +# resorting. The default is 15min. +autoResetDelay = 900 + +# Defines a list of channels codes to be displayed. 
List items may contain +# wildcards at any position and are separated by comma. The list is intersected +# with all channels configured in inventory. +# Examples: +# default : display all streams configured by global bindings +# default, PF.BON.00.HH? : display default and all HH streams of PF.BON.00 +streams.codes = default + +# Latitude of the initial location for sorting traces. +streams.sort.latitude = 0.0 + +# Longitude of the initial location for sorting traces. +streams.sort.longitude = 0.0 + +# The sort mode applied initially +streams.sort.mode = distance + +# Minimum longitude. +streams.region.lonmin = -180.0 + +# Maximum longitude. +streams.region.lonmax = 180.0 + +# Minimum latitude. +streams.region.latmin = -90.0 + +# Maximum latitude. +streams.region.latmax = 90.0 diff --git a/etc/defaults/scsohlog.cfg b/etc/defaults/scsohlog.cfg new file mode 100644 index 0000000..2c43203 --- /dev/null +++ b/etc/defaults/scsohlog.cfg @@ -0,0 +1,12 @@ +# Defines the output file to be created at every timeout +monitor.output.file = @LOGDIR@/server.xml + +# Defines the timeout interval in seconds. Every N seconds +# the XML output file is generated. +monitor.output.interval = 60 + +# Defines an output script which is called after the output +# file is generated to trigger file processing. The execution +# of the script is blocking the application and thus the +# script should not spend too much time with its operations. +#monitor.output.script = "" diff --git a/etc/defaults/scwfas.cfg b/etc/defaults/scwfas.cfg new file mode 100644 index 0000000..2cb9f0e --- /dev/null +++ b/etc/defaults/scwfas.cfg @@ -0,0 +1,24 @@ +# Defines an alternative SDS archive handler. This is the name of an +# RecordStream interface that can be loaded via a plugin. +# If not given an internal implementation will be used. +handlerSDS = "" + +# Defines the filebase of the SDS archive. If an alternative archive handler +# is defined this value serves as input to setSource(). +filebase = @ROOTDIR@/var/lib/archive + +# Defines the server port for Arclink connections. By default the Arclink +# standard port 18001 will be used. +arclink.port = -1 + +# Defines the server port for FDSNWS connections. By default +# port 18082 will be used. +fdsnws.port = 8080 + +# Defines the base URL of the FDSN webservice that is +# given in the WADL document. +fdsnws.baseURL = http://localhost:8080/fdsnws + +# Defines the aggregated maximum time window (seconds) for all requested " +# streams. A value of 0 will deactive any restriction. +fdsnws.maxTimeWindow = 0 diff --git a/etc/defaults/scwfparam.cfg b/etc/defaults/scwfparam.cfg new file mode 100644 index 0000000..bdd7932 --- /dev/null +++ b/etc/defaults/scwfparam.cfg @@ -0,0 +1,251 @@ +# default subscriptions groups +connection.subscriptions = PICK, AMPLITUDE, MAGNITUDE, LOCATION, EVENT + +# The path to the processing info logfile. +wfparam.logfile = @LOGDIR@/scwfparam-processing-info.log + +# Defines the white- and blacklist of data streams to be used. +# The rules to decide if a stream is used or not are the following: +# 1. if whitelist is not empty and the stream is not on the whitelist, +# don't use it, ok otherwise +# 2. if blacklist is not empty and the stream is on the blacklist, +# don't use it, ok otherwise +# Both checks are made and combined with AND. +# Either whitelist or blacklist contains a list of patterns (wildcard allowed +# as * and ?), eg GE.*.*.*, *, GE.MORC.*.BH? 
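+# For instance (illustrative values): a whitelist of GE.*.*.* combined
+# with a blacklist of GE.MORC.*.* processes all GE streams except
+# those of station MORC.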
+# Each stream id (NET.STA.LOC.CHA) will be checked against the defined patterns +wfparam.streams.whitelist = "" +wfparam.streams.blacklist = "" + +# Default value of total time window length in seconds +# if wfparam.magnitudeTimeWindowTable is not specified. +# This times window includes wfparam.preEventWindowLength. +wfparam.totalTimeWindowLength = 360 + +# Magnitude dependent time window table. The format is +# "mag1:secs1, mag2:secs2, mag3:secs3". +# If a magnitude falls between two configured magnitudes the time window +# of the lower magnitude is then used. No interpolation takes place. +# Magnitude outside the configured range are clipped to the lowest/highest +# value. +#wfparam.magnitudeTimeWindowTable = "" + +# The pre event time window length in seconds. +wfparam.preEventWindowLength = 60 + +# Analogue to wfparam.magnitudeTimeWindowTable but instead giving a time +# window, the distance in km is specified. +#wfparam.magnitudeDistanceTable = "" + +# The maximum epicentral distance in km of a station being considered for +# processing. This value is used if wfparam.magnitudeDistanceTable is not +# specified. +wfparam.maximumEpicentralDistance = 400 + +# Relative saturation threshold in percent. If the absolute raw amplitude +# exceeds X% of 2**23 counts the station will be excluded from processing. +wfparam.saturationThreshold = 80 + +# Specifies the STA length in seconds of the applied STA/LTA check. +wfparam.STAlength = 1 + +# Specifies the LTA length in seconds of the applied STA/LTA check. +wfparam.LTAlength = 60 + +# Specifies the minimum STALTA ratio to be reached to further process a station. +wfparam.STALTAratio = 3 + +# Specifies the number of seconds around P to be used to check the STA/LTA ratio +wfparam.STALTAmargin = 5 + +# Defines the factor applied to the signigicant duration to define the +# processing spetra time window. If that value is <= 0 the totalTimeWindowLength +# is used. +wfparam.durationScale = 1.5 + +# Specifies a list of damping values for computation of the relative +# displacement elastic response spectrum in percent. +wfparam.dampings = 5 + +# Specifies the number of natural periods for computation of the relative +# displacement elastic response spectrum between Tmin and Tmax. +wfparam.naturalPeriods = 100 + +# Defines if a linear spacing or logarithmic spacing between Tmin and Tmax +# is used. The default is a linear spacing. The logarithmic spacing will fail +# if either Tmin or Tmax is 0. +wfparam.naturalPeriods.log = false + +# Specifies the minimum period (Tmin) in seconds for computation of the +# relative displacement lastic response spectrum. +wfparam.Tmin = 0 + +# Specifies the maximum period (Tmax) in seconds for computation of the +# relative displacement elastic response spectrum. +wfparam.Tmax = 5 + +# Enables/disables after shock removal. +wfparam.afterShockRemoval = true + +# Enables/disables pre-event cut-off. A hardcoded sta/lta algorithm +# (with sta=0.1s, lta=2s, sta/lta threshold=1.2) is run on the time window +# defined by (expected_P_arrival_time - 15 s). The pre-event window is hence +# defined as [t(sta/lta =1.2) - 15.5 s, t(sta/lta =1.2) - 0.5 s]. +wfparam.eventCutOff = 1 + +# Specifies the filter order of the general filter. +wfparam.filter.order = 4 + +# Magnitude dependent filter table. The format is +# "mag1:fmin1;fmax1, mag2:fmin2;fmax2, mag3:fmin3;fmax3". +# If a magnitude falls between two configured magnitudes the filter +# of the lower magnitude is then used. No interpolation takes place. 
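+# Example (illustrative, using the default table configured below): a
+# magnitude 4.2 event falls between the entries for magnitude 3 and 5
+# and therefore uses the magnitude 3 filter, i.e. a pass band from
+# 0.1 Hz up to 0.8 times the Nyquist frequency of the data.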
+# Magnitude outside the configured range are clipped to the lowest/highest +# value. +# Frequency values are given as simple positive doubles (Hz is assumed) or +# with suffix "fNyquist" which is then multiplied by the Nyquist frequency +# of the data to get the final corner frequency. +wfparam.magnitudeFilterTable = 0:0.2;0.8fNyquist,\ + 3:0.1;0.8fNyquist,\ + 5:0.05;0.8fNyquist,\ + 7:0.025;0.8fNyquist + +# Specifies the frequency of the general hi-pass filter. If this parameter is +# equal to 0 the hi-pass filter is not used. If suffix "fNyquist" is used +# then the value is multiplied by the Nyquist frequency of the data to get +# the final corner frequency of the filter. +wfparam.filter.loFreq = 0.025 + +# Specifies the frequency of the general lo-pass filter. If this parameter +# is equal to 0 the lo-pass filter is not used. If suffix "fNyquist" is +# used then the value is multiplied by the Nyquist frequency of the data to +# get the final corner frequency of the filter. +wfparam.filter.hiFreq = 40 + +# Specifies the filter order of the post-deconvolution filter. +wfparam.pd.order = 4 + +# Specifies the frequency of the post-deconvolution hi-pass filter. +# If this parameter is equal to 0 the hi-pass filter is not used. If +# suffix "fNyquist" is used then the value is multiplied by the Nyquist +# frequency of the data to get the final corner frequency of the filter. +wfparam.pd.loFreq = 0 + +# Specifies the frequency of the post-deconvolution lo-pass filter. +# If this parameter is equal to 0, the lo-pass filter is disabled. +# If suffix "fNyquist" is used then the value is multiplied by the Nyquist +# frequency of the data to get the final corner frequency of the filter. +wfparam.pd.hiFreq = 0 + +# Enables/disables deconvolution and thus the usage of wfparam.pd.order, +# wfparam.pd.loFreq and wfparam.pd.hiFreq. If a channel does not provide full +# response information it is not used for processing. +wfparam.deconvolution = true + +# Specifies the interval in seconds to check/start scheduled operations. +wfparam.cron.wakeupInterval = 10 + +# Specifies the maximum allowed idle time of a process before removed. +# The idle time is calculated if no further processing is scheduled and +# computes as: [now]-lastRun. +wfparam.cron.eventMaxIdleTime = 3600 + +# Enables/disables updating of a cron log file. This file will be created under +# ~/.seiscomp/log/[appname].sched +# and contains information about the scheduled events and the processing queue. +# The file is updated each n seconds, where n = wfparam.cron.wakeupInterval. +wfparam.cron.logging = true + +# Specifies the delay in seconds to delay processing if a new authoritative +# origin arrives for an event. +wfparam.cron.updateDelay = 60 + +# Specifies a list of delay times in seconds relative to event time to trigger +# the processing. When the first origin of an event arrives this list is used +# to construct the crontab for this event. +wfparam.cron.delayTimes = "" + +# Specifies the initial acquisition timeout. If the acquisition source +# (eg Arclink) does not respond within this threshold with waveforms, +# the request is discarded. +wfparam.acquisition.initialTimeout = 30 + +# Specifies the acquisition timeout when waveforms are being transfered. +# If no new waveforms arrive within this threshold, the request is aborted. +# This is important if a Seedlink connection is configured which can block +# the application for a very long time if at least one requested channel has +# no data. 
Seedlink does not finished the request until all data has been sent. +# When data will arrive for a particular channel is not known. +wfparam.acquisition.runningTimeout= 2 + +# Enables generation of short output event id's. +wfparam.output.shortEventID = false + +# Enables/disables the output of processed waveforms. +wfparam.output.waveforms.enable = false + +# Specifies the waveform output path. This parameter is only used if +# wfparam.output.waveforms.enable is true. +wfparam.output.waveforms.path = @LOGDIR@/shakemaps/waveforms + +# Enables/disables the creation of an event directory (named with eventID) when +# storing the processed waveforms. This parameter is only used if +# wfparam.output.waveforms.enable is true. +wfparam.output.waveforms.withEventDirectory = false + +# Enables/disables the output of spectra (psa, drs). The output format is a +# simple ascii file where the first column is the period and the second column +# the corresponding value. +wfparam.output.spectra.enable = false + +# Specifies the spectra output path. This parameter is only used if +# wfparam.output.spectra.enable is true. +wfparam.output.spectra.path = @LOGDIR@/shakemaps/spectra + +# Enables/disables the creation of an event directory (named with eventID) +# when storing the spectra. This parameter is only used if +# wfparam.output.spectra.enable is true. +wfparam.output.spectra.withEventDirectory = false + +# Enables/disables ShakeMap XML output. +wfparam.output.shakeMap.enable = true + +# Specifies the ShakeMap XML output path. This is only used if +# wfparam.output.shakeMap.enable is set to true. +wfparam.output.shakeMap.path = @LOGDIR@/shakemaps + +# Specifies a script that is called whenever a new ShakeMap XML is available. +# The script is called with 5 parameters: +# * EventID +# * modified EventID for ShakeMap output +# * path to event directory (where input/event.xml and input/event_dat.xml lives) +# No files are deleted by the application. The ownership goes to the +# called script. +#wfparam.output.shakeMap.script = ... + +# Enables/disables synchronous or asynchronous script calls. If enabled, +# be careful not to spend too much time in the script. The application is +# blocked while the script is running. +wfparam.output.shakeMap.synchronous = true + +# If enabled the maximum PGV, PGA, PSA03, PSA10 and PSA30 of both horizontal +# components is used in the final output. Otherwise each component is saved. +wfparam.output.shakeMap.maximumOfHorizontals = false + +# The XML encoding string written to the Shakemap XML file. +wfparam.output.shakeMap.encoding = "UTF-8" + +# The target version of the Shakemap input files. +wfparam.output.shakeMap.version = 3 + +# Enables messaging output which creates objects of the StrongMotionParameters +# data model extension (defined by SED) and sends them to scmaster. In order to +# save the objects to the database, scmaster needs to load the dmsm plugin and +# the corresponding database schema must be applied. +# The default message group is AMPLITUDE. To change this group redefine +# connection.primaryGroup. +wfparam.output.messaging = false + +# Defines the magnitude tolerance to completely reprocess an event with respect +# to the last state. +wfparam.magnitudeTolerance = 0.5 diff --git a/etc/defaults/seedlink.cfg b/etc/defaults/seedlink.cfg new file mode 100644 index 0000000..51e6fdd --- /dev/null +++ b/etc/defaults/seedlink.cfg @@ -0,0 +1,114 @@ +# Default network code. Used when a network code is omitted by a client +# in STATION request. 
Should be set to the network code of the majority +# of configured stations. 1 or 2 characters long, uppercase. +network = XX + +# Defines the Seedlink port to accept requests. +port = 18000 + +# Path to the base directory of SeedLink data files (disk buffer). +filebase = @ROOTDIR@/var/lib/seedlink/buffer + +# List of trusted addresses. +trusted = 127.0.0.0/8 + +# List of IP addresses or IP/mask pairs (in ipchains/iptables syntax) +# that can access stations. Per station access definitions +# supersede this parameter. By default any client can access +# all stations. +access = 0.0.0.0/0 + +# Check start and end times of streams. +stream_check = true + +# If stream_check = enabled, also check for gaps in all channels that +# match given pattern. Register all gaps that are larger than +-0.5 seconds. +# gap_check_pattern = [EBLV][HLNG][ZNE]|S[NG][ZNE]. +# Disabled to save memory. +gap_check_pattern = XXXXX + +# Time difference between records (microseconds) above which a gap is declared. +gap_treshold = 500000 + +# Can be enabled or disabled. Required for slinktool option -tw. +window_extraction = true + +# Same as window_extraction for trusted IP addresses. +window_extraction_trusted = true + +# Allow websocket connections. +websocket = false + +# Same as websocket for trusted IP addresses. +websocket_trusted = false + +# If activated Seedlink uses the mseedfifo to read records and +# only the mseedfifo_plugin is started. This command is useful +# to playback historic data by eg msrtsimul. +msrtsimul = false + +# Size of memory buffer (number of recent Mini-SEED records kept in RAM). +buffers = 100 + +# Number of disk buffer segments (files under /station/segments/ +# where is the directory given by the filebase parameter). +segments = 50 + +# Size of one disk buffer segment in the records (512-byte units). +segsize = 1000 + +# Number of blank records to insert after the re-scan of disk buffer +# if /station/buffer.xml is not found (assuming the server +# did not terminate correctly). +blanks = 10 + +# Encoding of Mini-SEED records created by SeedLink. The value must be +# steim1 or steim2. If omitted, the global encoding parameter is used. +encoding = steim2 + +# INFO provided to arbitrary Internet hosts: ID, CAPABILITIES, STATIONS, +# STREAMS +info = streams + +# INFO provided to trusted hosts: ID, CAPABILITIES, STATIONS, STREAMS, +# GAPS, CONNECTIONS, ALL +info_trusted = all + +# Show requests in log file +request_log = true + +# Give warning if an input channel has time gap larger than 10 us +proc_gap_warn = 10 + +# Flush streams if an input channel has time gap larger than 0.1 s +proc_gap_flush = 100000 + +# Reset FIR filters if an input channel has time gap larger than 1 s +proc_gap_reset = 1000000 + +# Enable backfilling buffer for out-of-order records when raw samples +# are transmitted. This values defines its capacity in seconds. +backfill_buffer = 0 + +# Maximum allowed deviation from the sequence number of oldest packet if +# packet with requested sequence number is not found. If seq_gap_limit is +# exceeded, data flow starts from the next packet coming in, otherwise +# from the oldest packet in buffer. 
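+# For example (illustrative): if a client resumes with a sequence
+# number that has already been overwritten and the oldest packet in
+# the buffer is more than seq_gap_limit packets away, transmission
+# continues with newly arriving data instead of replaying the buffer.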
+# Use the following to always start with the oldest packet: +# seq_gap_limit = 16777216 +seq_gap_limit = 100000 + +# Total number of TCP/IP connections allowed +connections = 500 + +# Maximum number of TCP/IP connections per IP +connections_per_ip = 20 + +# Maximum speed per connection (0: throttle disabled) +bytespersec = 0 + +# Define a database read connection to be used for Seedlink station descriptions. +# If no database is configured (which is the default) then the station code will be used. +# If a remote host is specified, ensure that its database server is reachable from this computer. +inventory_connection = "" + diff --git a/etc/defaults/slarchive.cfg b/etc/defaults/slarchive.cfg new file mode 100644 index 0000000..3537d06 --- /dev/null +++ b/etc/defaults/slarchive.cfg @@ -0,0 +1,72 @@ +# Host of the Seedlink server to connect to. If the acquisition +# is running on one system nothing needs to be changed. +address = 127.0.0.1 + +# The port of the Seedlink server to connect to. If the acquisition +# is running on one system this port must match the configured +# local Seedlink port. +port = 18000 + +# Path to waveform archive where all data is stored. Relative paths +# (as the default) are treated relative to the installation +# directory ($SEISCOMP_ROOT). +archive = var/lib/archive + +# Number of records (512 byte units) to buffer before flushing to +# disk. +buffer = 1000 + +# The network reconnect delay (in seconds) for the connection +# to the SeedLink server. If the connection breaks for any +# reason this will govern how soon a reconnection should be +# attempted. The default value is 30 seconds. +delay = 30 + +# The network timeout (in seconds) for the connection to the +# SeedLink server. If no data [or keep alive packets?] are received +# in this time range the connection is closed and re-established +# (after the reconnect delay has expired). The default value is +# 600 seconds. A value of 0 disables the timeout. +networkTimeout = 900 + +# Timeout for closing idle data stream files in seconds. The idle +# time of the data streams is only checked when some packets has +# arrived. If no packets arrived no idle stream files will be +# closed. There is no reason to change this parameter except for +# the unusual cases where the process is running against an open +# file number limit. Default is 300 seconds. +idleTimeout = 300 + +# Interval (in seconds) at which keepalive (heartbeat) packets +# are sent to the server. Keepalive packets are only sent if +# nothing is received within the interval. This requires a +# Seedlink version >= 3. +keepalive = 0 + +# Path to cerificate store where all certificates and CRLs are stored. Relative +# paths(as the default) are treated relative to the installation directory +# ($SEISCOMP_ROOT). If the signature check is enabled slarchive loads all files +# at start. The store uses the OpenSSl store format. From the offical OpenSSL +# documentation: "The directory should contain one certificate or CRL per file +# in PEM format, with a file name of the form hash.N for a certificate, or +# hash.rN for a CRL. The .N or .rN suffix is a sequence number that starts at +# zero, and is incremented consecutively for each certificate or CRL with the +# same hash value. 
Gaps in the sequence numbers are not supported, it is +# assumed that there are no more objects with the same hash beyond the first +# missing number in the sequence.The .N or .rN suffix is a sequence number that +# starts at zero, and is incremented consecutively for each certificate or CRL +# with the same hash value. Gaps in the sequence numbers are not supported, it +# is assumed that there are no more objects with the same hash beyond the first +# missing number in the sequence." The hash value can be obtained as follows: +# openssl x509 -hash -noout -in >file< +validation.certs = var/lib/certs + +# Signatures are expected to be carried in blockette 2000 as opaque data. +# Modes: +# ignore : Signatures will be ignored and no further actions will be taken. +# warning: Signatures will be checked and all received records which do not +# carry a valid signature or no signature at all will be logged with at warning +# level. skip : All received records without a valid signature will be ignored +# and will not be processed. +validation.mode = ignore + diff --git a/etc/descriptions/access.xml b/etc/descriptions/access.xml new file mode 100644 index 0000000..43c82df --- /dev/null +++ b/etc/descriptions/access.xml @@ -0,0 +1,46 @@ + + + + + Access module for FDSNWS. + + + + + + FDSNWS station access. + + + + + Defines access to restricted data. When defined it will add the listed users to the list of authorized users to access certain restricted data give the parameters on this binding. + + + + List of users (e-mail addresses) allowed to access the restricted data. + + + + + When disableStationCode option is set to true the access entries will be generated only for the network level (and optionally stream level), no station code will be filled (this can potentially reduce the number of entries on the access table, and save memory on the request handler when used). + + + + + List of locations.streams this access rule applies to. The location code is optional (optional, allow wildcards). + + + + + Start of validity (optional). + + + + + End of validity (optional). + + + + + + diff --git a/etc/descriptions/bindings2cfg.xml b/etc/descriptions/bindings2cfg.xml new file mode 100644 index 0000000..772a418 --- /dev/null +++ b/etc/descriptions/bindings2cfg.xml @@ -0,0 +1,73 @@ + + + + + Synchronize key files with configuration database or convert them to + configuration XML. 
+ + + + bindings2cfg [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#print-config-vars + generic#validate-schema-params + generic#dump-settings + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#config-db + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + + + + + + + + diff --git a/etc/descriptions/caps_plugin.xml b/etc/descriptions/caps_plugin.xml new file mode 100644 index 0000000..2ea70a3 --- /dev/null +++ b/etc/descriptions/caps_plugin.xml @@ -0,0 +1,49 @@ + + + + seedlink + Global options for the CAPS plugin + + + + + + The maxmimum time difference with respect to current + time of the end time of a received record. If + exceeded then the end time will not be logged into + the state file. + + + + + Request all records in-order. Out-of-order records will + be skipped. + + + + + + + + Transfers data from CAPS to SeedLink server + + + + CAPS URL to fetch data from, format: [[caps|capss]://][user:pass@]host[:port] + + + + + Comma separated list of streams. Stream format is [LOC.CHA]. + Streams may contain wildcards + + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/diskmon.xml b/etc/descriptions/diskmon.xml new file mode 100644 index 0000000..24ad66c --- /dev/null +++ b/etc/descriptions/diskmon.xml @@ -0,0 +1,22 @@ + + + + Monitors a disk and sends notifications. + + + + Disk usage threshold in percent. Each time when the disk usage exceeds this level, + an alert e-mail is send to the user. Note that disk usage is only checked when a + cron job of seiscomp check is installed or seiscomp check is called + regularly by other means. + + + + + Comma-separated list of e-mail addresses to notify when disk usage + threshold is exceeded. + + + + + diff --git a/etc/descriptions/dlsv2inv.xml b/etc/descriptions/dlsv2inv.xml new file mode 100644 index 0000000..2fb8db8 --- /dev/null +++ b/etc/descriptions/dlsv2inv.xml @@ -0,0 +1,91 @@ + + + + + Convert dataless SEED to SeisComP inventory XML. 
+ + + + dlsv2inv [OPTIONS] input [output=stdout] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#print-config-vars + generic#validate-schema-params + generic#dump-settings + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + verbosity#print-context + verbosity#print-component + verbosity#log-utc + verbosity#trace + + + + + + + + + + + + + + + + + + diff --git a/etc/descriptions/ew2sc3.xml b/etc/descriptions/ew2sc3.xml new file mode 100644 index 0000000..a7cd80b --- /dev/null +++ b/etc/descriptions/ew2sc3.xml @@ -0,0 +1,160 @@ + + + + + Earthworm hypo2000_arc messages importer + + + + + + Folder to store logs and achives of hypo2000_arc files from Earthworm export_genericer + + + + + Earthworm export_generic's socket to listen to + + + + + Expected Earthworm Module ID (ew2sc3 will read the message only if its ModID is correct). + Set to 0 (MOD_WILDCARD) to accept any Earthworm Module ID. + + + + + Expected Earthworm Institute ID (ew2sc3 will read the message only if its InstID is correct) + Set to 0 (INST_WILDCARD) to accept any Earthworm Institute ID. + + + + + Institute name to use when storing origin into database. + If blank, the origin will have the system AgencyID. If specified, the origin will have the given AgencyID. + + + + Author name to use when storing origin into database. + + + + Earthworm export_generic IP address to connect to + + + + + Default event latitude to use if hypo2000_arc location is null (space filled) + + + + + Default event longitude to use if hypo2000_arc location is null (space filled) + + + + + Name of the earth model used by Earthworm location process + + + + + Enable/Disable hypo2000_arc message file archiving (usefull for troobleshooting) + + + + + Time interval (in sec) between two heartbeats messages sent by SeisComP. + This parameter should be lower than Earthworm export_generic RcvAliveInt parameter + + + + + Maximum time (in millisecond) before the connection to Earthworm export_generic times out. + This parameter has to be set accordingly to Earthworm export_generic parameters + + + + + Maximum message size (in char) between the Earthworm export_generic and ew2sc3. + This parameter has to be set accordingly to Earthworm export_generic MaxMsgSize parameter + + + + + Alive string to send to Earthworm export_generic in order to keep the connection alive. + This string should be identical as Earthworm export_generic RcvAliveText string + + + + + Alive string expected from Earthworm export_generic in order to keep the connection alive. + This string should be identical as Earthworm export_generic SendAliveText string + + + + + Enable/Disable uncertainties conversions. + Earthworm doesn't have uncertainties but weight from 0 to 4. + If enabled, ew2sc3 will convert pick weight to uncertainties followind a simple mapping between weight and pickerUncertainties list. + + + + + Uncertainty values (in sec) to use when enableUncertainties is enabled. + Refer to scolv document for syntax. 
+ + + + + Maximum weight value from Earthworm corresponding to maximum uncertainty + + + + + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + + + + + diff --git a/etc/descriptions/fdsnws.xml b/etc/descriptions/fdsnws.xml new file mode 100644 index 0000000..b30645f --- /dev/null +++ b/etc/descriptions/fdsnws.xml @@ -0,0 +1,286 @@ + + + + + Provide FDSN Web Services. + + + + fdsnws [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + records#record-driver-list + records#record-url + records#record-file + records#record-type + + + + + + Define the bind address of the server. "0.0.0.0" allows + any interface to connect to this server whereas "127.0.0.0" + only allows connections from localhost. + + + + + Server port to listen for incoming requests. Note: The FDSN Web + service specification defines the service port 80. Please refer + to the documentation on how to serve on privileged ports. + + + + + Number of maximum simultaneous requests. + + + + + Maximum number of objects per query, used in fdsnws-station and + fdsnws-event to limit main memory consumption. + + + + + Restrict end time of requests to current time - realtimeGap + seconds. Negative values allowed. Used in fdsnws-dataselect. + WARNING: If this value is unset and a realtime recordsource + (e.g. slink) is used, requests may block if end time in future + is requested. + + + + + Maximum number of samples (in units of million) per query, used + in fdsnws-dataselect to prevent a single user to block one + connection with a large request. + + + + + Set the number of bytes to buffer for each chunk of waveform data + served to the client. The lower the buffer the higher the overhead + of Python Twisted. The higher the buffer the higher the memory + usage per request. 100kB seems to be a good trade-off. + + + + + Path to password file used in fdsnws-dataselect/queryauth. The + format is 'username:password' separated by lines. Because of the + HTTP digest authentication method required by the FDSN + specification, the passwords have to be stored in plain text. + + + + + Path to access log file. If unset no access log is created. + + + + + Path to request log file. If unset no request log is created. + + + + + Secret salt for calculating userID. + + + + + List of domain names Cross-Origin Resource Sharing (CORS) + request may originate from. A value of '*' allows any web page + to embed your service. An empty value will switch of CORS + requests entirely. 
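A minimal sketch of the server-level options described above (bind address, port, CORS origins). The key names are assumptions consistent with the parameter descriptions and the values are illustrative; confirm both against the generated fdsnws documentation. The CORS origin list is illustrated again in the example directly below.

    # fdsnws module configuration -- illustrative sketch only, key names assumed
    # Accept connections on any interface; the FDSN specification defines port 80,
    # serving on privileged ports is covered in the documentation
    listenAddress = 0.0.0.0
    port = 8080
    # Allow CORS requests from specific web front ends only
    corsOrigins = https://test.domain.de, https://production.domain.de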
An example of multiple domains might be: + 'https://test.domain.de, https://production.domain.de'. + + + + + Enable/disable access to restricted inventory data. + + + + + Enable/disable handling of time-based conditional requests (RFC + 7232) by the fdsnws-station resource. + + + + + If enabled, then access to restricted waveform data is + controlled by arclink-access bindings. By default authenticated + users have access to all data. + + + + + If enabled, author information is removed from any event + creationInfo element. + + + + + If enabled, event comment elements are no longer accessible. + + + + + If set, the event service will only return events having a + preferred origin with a matching evaluationMode property. + + + + + List of enabled event types + + + List of disabled event types + + + + + List of enabled event formats. If unspecified, all supported + formats are enabled. + + + + + Enable/disable the DataSelect service. + + + + + Enable/disable the Event service. + + + + + Enable/disable the Station service. + + + + + Enable/disable the Availability service. Note: This is a non + standard FDSNWS extension served under fdsnws/ext/availability. + + + + + Path to station inventory filter file. + + + + + Path to dataselect inventory filter file. + + + + + If enabled, a debug line is written for each stream ID explaining + why a stream was added/removed by a inventory filter. + + + + + Define the prefix for the default filenames if downloading and + saving data from within a browser. + For data loaded using dataselect, it is thus fdsnws.mseed by default. + + + + + Provide access to waveform data availability information stored + in the SeisComP database. In case of a SDS archive, this + information may be collected by scardac (SeisComP archive + data availability collector). + + + + Enable loading of data availabilty information from + SeisComP database. Availability information is used by + station and ext/availability service. + + + + + Number of seconds data availabilty information is considered + valid. If the duration time is exceeded, the information is + fetched again from the database. + + + + + Name of the archive use in sync format of dataavailability + extent service. + + + + + Name of the archive use in some format of data availability + extent service. + + + + + + + Save request log to database. + + + + + Default user. + + + + + + + Enable auth extension. + + + + + GnuPG home directory. + + + + + List of revoked token IDs. + + + + + + diff --git a/etc/descriptions/fdsnxml2inv.xml b/etc/descriptions/fdsnxml2inv.xml new file mode 100644 index 0000000..85c3636 --- /dev/null +++ b/etc/descriptions/fdsnxml2inv.xml @@ -0,0 +1,72 @@ + + + + + Convert station inventory between FDSN StationXML format and + SeisComP inventory XML. 
+ + + + + + + + fdsnxml2inv [OPTIONS] input [output] + + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#print-config-vars + generic#validate-schema-params + generic#dump-settings + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + verbosity#print-context + verbosity#print-component + verbosity#log-utc + verbosity#trace + + + + + + + + + + + diff --git a/etc/descriptions/gdrt_plugin.xml b/etc/descriptions/gdrt_plugin.xml new file mode 100644 index 0000000..0d40aa5 --- /dev/null +++ b/etc/descriptions/gdrt_plugin.xml @@ -0,0 +1,126 @@ + + + + seedlink + Global options for the qdrt plugin + + + + GDRT (GFZ Displacement Real-Time) is the data format of the Real-time Precise + Point Positioning (RTPPP) provided by the Helmholtz Centre Potsdam – GFZ German + Research Centre for Geosciences. This development was supported by the German + Federal Ministry of Education and Research (BMBF) within the EWRICA project. + + The GDRT plugin is a SeedLink plugin that converts GDRT UDP messages to + miniSEED channels according to the following table: + + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | Index | Field name | Format | Description | miniSEED channel | + +=======+=====================+================+=======================================+===================+ + | 1 | Start of message | character(1) | A fixed character '>' | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 2 | Station ID | character(4) | GDRT station ID | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 3 | Epoch time | integer(4) | UTC time (year, month, day, hour, | | + | | | integer(2) | minute, second) | | + | | | integer(2) | | | + | | | integer(2) | | | + | | | integer(2) | | | + | | | double(%5.2f) | | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 4 | Solution type | integer(2) | 0: Single point positioning (SPP) | LBS | + | | | +---------------------------------------+ | + | | | | +1: Precise point positioning (PPP) | | + | | | +---------------------------------------+ | + | | | | +2: Precise point positioning | | + | | | | ambiguity resolution (PPP-AR) | | + | | | +---------------------------------------+ | + | | | | +4: Precise point positioning | | + | | | | regional augmentation (PPP-RA) | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 5 | Observation type | integer(2) | +1: GPS | LBO | + | | | +---------------------------------------+ | + | | | | +2: GLONASS | | + | | | +---------------------------------------+ | + | | | | +4: Galileo | | + | | | +---------------------------------------+ | + | | | | +8: BDS | | + | | | +---------------------------------------+ | + | | | | +16: Seismic data | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 6 | Positioning type | integer(2) | 1: Coordinate fixed solution | LBP | + | | | +---------------------------------------+ | + | | | | 2: Static solution | | + | | | +---------------------------------------+ | + | | | | 3: Kinematic solution | | + | | | +---------------------------------------+ 
| + | | | | 4: Dynamic solution | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 7 | Processing type | integer(2) | 1: Real-time solution | LBT | + | | | +---------------------------------------+ | + | | | | 2: Post-processing solution | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 8 | Ratio of | double(%6.2f) | The ratio value of PPP ambiguty | LBR (×100) | + | | ambiguity fix | | fixing (larger value means more | | + | | | | reliable ambiguity fixing) | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 9 | Number of | integer(2) | Number of satellites used for the | LBC (summed) | + | | satellites | integer(2) | solution (GPS, GLONASS, Galileo, BDS) | | + | | | integer(2) | | | + | | | integer(2) | | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 10 | Coordinate X | double(%14.4f) | Coordinates X, Y, and Z in the Earth | LBX | + +-------+---------------------+----------------+ Centered Earth Fixed (ECEF) +-------------------+ + | 11 | Coordinate Y | double(%14.4f) | coordinate system in meters | LBY | + +-------+---------------------+----------------+ +-------------------+ + | 12 | Coordinate Z | double(%14.4f) | | LBZ | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 13 | Variance of | double(%14.4f) | Variance of coordinates (m²) | LB1 | + | | coordinate X | | | | + +-------+---------------------+----------------+ +-------------------+ + | 14 | Variance of | double(%14.4f) | | LB2 | + | | coordinate Y | | | | + +-------+---------------------+----------------+ +-------------------+ + | 15 | Variance of | double(%14.4f) | | LB3 | + | | coordinate Z | | | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 16 | Covariance of | double(%14.4f) | Covariance of coordinates (m²) | LB4 | + | | coordinates X and Y | | | | + +-------+---------------------+----------------+ +-------------------+ + | 17 | Covariance of | double(%14.4f) | | LB5 | + | | coordinates X and Z | | | | + +-------+---------------------+----------------+ +-------------------+ + | 18 | Covariance of | double(%14.4f) | | LB6 | + | | coordinates Y and Z | | | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 19 | Correction | string(4) | Name of orbit/clock/upd product and | | + | | information | | name of three used reference stations | | + | | | | in PPP-RA, separated with '+' | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + | 20 | End of message | character(1) | A fixed character '!' | | + +-------+---------------------+----------------+---------------------------------------+-------------------+ + + + + + GDRT station code. + + + + + Sensor location code. + + + + + Sample rate in Hz. + + + + + UDP port for receiving GDRT messages. By default port 9999 will be used. + + + + + diff --git a/etc/descriptions/global.xml b/etc/descriptions/global.xml new file mode 100644 index 0000000..e848ec1 --- /dev/null +++ b/etc/descriptions/global.xml @@ -0,0 +1,1353 @@ + + + + Global parameters for all trunk applications. + + + + Agency ID used to set creationInfo.agencyID in + data model objects. 
Should not contain spaces. + + + + + Datacenter ID which is primarily used by Arclink and + its tools. Should not contain spaces. + + + + + Organization name used mainly by ArcLink and SeedLink. + + + + + Author name used to set creationInfo.author in data model + objects. @appname@ is replace by the name of the + respective executable, @hostname@ by the name of the computer + on which it runs. + + + + + List of plugins loaded at startup. Separate multiple names by + comma. Add ${plugins} to consider all previously read values. + + + + + Path to the cities XML file. If undefined, the data is read + from "@CONFIGDIR@/cities.xml" or + "@DATADIR@/cities.xml". + SeisComP ships with "@DATADIR@/cities.xml". + + + + + Load cities from file configured by the "cityXML" + parameter. GUI and map applications enable loading of cities + by default considering the parameter + "scheme.map.showCities". + + + + + Load custom FEP (Flinn-Engdahl polygon) regions from + "@CONFIGDIR@/fep/" or @DATADIR@/fep/". + GUI and map applications and scevent enable FEP regions by + default. Explicitly uncheck the option to disregard by these + applications. + + + + + Define a custom pattern for generation of public object IDs. + The following variables are resolved: + @classname@ (class name of object), + @id@ (public object count), + @globalid@ (general object count), + @time@ (current time, use '/' to supply custom format e.g. + @time/%FT%T.%fZ@ + + + + + Name of the configuration module. + + + + + Define the database connection. If no database is configured + (which is the default) and a messaging connection is available, + the application will receive the parameters after the connection + is established. Override these values only if you know what you + are doing. + + The format is: service://user:pwd@host/database. + "service" is the name of the database driver which + can be queried with "--db-driver-list". + IMPORTANT: This parameter replaces former + "database.type" and "database.parameters" + which have been removed. + + + + + SeisComP applications access waveform data through the RecordStream + interface. Read the SeisComP documentation for a list of + supported services and their configuration. + + This parameter configures RecordStream source URL. + + Format: [service://]location[#type] + + "service": The name of the RecordStream implementation. + If not given, "file://" is implied. + + IMPORTANT: This parameter replaces former RecordStream + configurations in "recordstream.service" and + "recordstream.source" which have been removed. + + + + + + Set the logging level between 1 and 4 where 1=ERROR, + 2=WARNING, 3=INFO and 4=DEBUG. + + + + + Enable logging to syslog if supported by the host system. + + + + + Limit the logging to the specified list of components. + Example: 'Application, Server'. + + + + + For each log entry print the component right after the + log level. By default, the component output is enabled + for file output but disabled for console output. + + + + + For each log entry print the source file name and line + number. + + + + + Use UTC instead of local time in logging timestamps. + + + + + + Enable rotation of log files. + + + + + + Time span after which a log file is rotated. + + + + + How many historic log files to keep. + + + + + The maximum size of a logfile in byte. The default + value is 100 megabyte. If a logfile exceeds that + size, then it is rotated. To disable the size limit + give 0 or a negative value. + + + + + + + + Time span for counting input/output of objects. 
+ + + + + + + Define the syslog facility to be used according to the + defined facility names in syslog.h. The default is local0. + If the given name is invalid or not available, + initializing logging will fail and the application quits. + + + + + + + + Define the URL of the messaging server to connect to. + URL format is [scheme://]host[:port][/queue]. + Default scheme: scmp, default port: 18180, + default queue: production. + Examples: + + All default: localhost + + Specific host: scmp://10.0.1.4:18180/production + + Specific host and SSL encryption: + scmps://10.0.1.4:18180/production + + + + + Define the username to be used. The length is maybe limited + by the messaging system used. By default, the module name + (name of the executable) is used but sometimes it exceeds + the 10 character limit and access is denied. To prevent + errors set a different username. An empty username will + let the system to generate one. + + + + + Define the OpenSSL client certificate to be used. Path to + OpenSSL certificate in PKCS 12 format or the prefix "data:" + followed by the Base64 encoded certificate data. + + + + + The connection timeout in seconds. 3 seconds are normally + more than enough. If a client needs to connect to a + remote system with a slow connection, a larger timeout + might be needed. + + + + + Define the primary group of a module. This is the name + of the group where a module sends its messages to if the + target group is not explicitly given in the send call. + + + + + Define the message encoding for sending. Allowed values + are "binary", "json" or "xml". + XML has more overhead in processing but is more robust when + schema versions between client and server are different. + + + + + Define a list of message groups to subscribe to. The + default is usually given by the application and does not + need to be changed. + + + + + + Define the database connection. If no database is configured + (which is the default) and a messaging connection is available, + the application will receive the parameters after the connection + is established. Override these values only if you know what you + are doing. + + + + Load the inventory database from a given XML file if set. + This overrides the inventory definitions loaded from the + database backend. + + + + + Load the configuration database from a given XML file if set. + This overrides the configuration definitions loaded from the + database backend. + + + + + + + + Define a whitelist of agencies that are allowed for + processing. Separate items by comma. + + + + + + + Define a blacklist of agencies that are not allowed for + processing. Separate items by comma. + + + + + + + + + List of network types to include when loading the + inventory. + + + + + List of station types to include when loading the + inventory. + + + + + + + List of network types to exclude when loading the + inventory. + + + + + List of station types to exclude when loading the + inventory. + + + + + + + + Path to crash handler script. + + + + + + + Define a list of core modules loaded at startup. + + + + + + + Enable sending of an application start- and stop message to + the STATUS_GROUP. + + + + + Enable automatic application shutdown triggered by a status + message. + + + + + Trigger shutdown if the module name of the received messages + match. + + + + + Trigger shutdown if the user name of the received messages + match. + + + + + + + A regular expression of all clients that should handle + a command message usually send to the GUI messaging group. 
+ Currently this flag is only used by GUI applications to + set an artificial origin and to tell other clients to + show this origin. To let all connected clients handle the + command, ".*$" can be used. + + + + + + Travel time table related configuration. Travel time tables can + be added via plugins. Built-in interfaces are LOCSAT, libtau and + homogeneous. + For each loaded interface a list of supported models must be + provided. + + + + + The list of supported model names per interface. + + + + + + + Amplitude configurations overriding global bindings. + + + + + Magnitude configurations overriding global bindings. + Add regionalization by magnitude-type profiles. Profile + names correspond to magnitude types. + + + + + Path to a geofeature file, e.g. in BNA or GeoJSON format, + with one or more polygons defining geographic regions. + Each region allows further magnitude parametrization. + The order of polygons/regions matters. The list is + tested from front to back and the first hit wins. + Magnitude-region profiles refer to the polygons by the + name of the profile. The special name "world" + must not be present in the geofeature file. + + + + + Add parameter profiles for regions defined in the + geofeature file. Profile names must match one polygon + name. Otherwise, the profile is not considered. + The special name "world" corresponds to the + region of the entire planet as a fallback. + + Magnitude-specific parameters, e.g. calibration functions, + are adjusted in a separate section of magnitudes with + the same name. + + + + + Enable the region or not. + + + + + The minimum distance required to compute a + magnitude. This settings has no effect with e.g. + scautopick as there is no information about the + source of the event to compute the distance. + The default value is implementation specific. + + + + + The maximum distance allowed to compute a magnitude. + This settings has no effect with e.g. scautopick + as there is no information about the source of + the event to compute the distance. The default + value is implementation specific. + + + + + The minimum depth required to compute a magnitude. + This settings has no effect with e.g. scautopick + as there is no information about the source of + the event to retrieve the depth. The default + value is implementation specific. + + + + + The maximum depth allowed to compute a magnitude. + This settings has no effect with e.g. scautopick + as there is no information about the source of + the event to retrieve the depth. The default + value is implementation specific. + + + + + Define how the region check is performed to accept + a source receiver pair for further processing. + Allowed values are: + + source: Only the source/origin must be within + the region. + + source-receiver: Both source/origin and + receiver/station must be within the region. + + raypath: The entire path between source/origin + and receiver/station must be withing the region. + + + + + Linear part of the default station correction + for this region. + + + + + Constant part of the default station correction + for this region. + + + + + + + + + + + Agency ID + Agency ID + + + Datacenter ID + Datacenter ID + + + Organization string + + This is used mainly by SeedLink and Arclink to + identify the server organization. Also, the ArcLink full + SEED volumes will have this organization value set. 
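A sketch of the messaging settings described above, reusing the server URL example given there. The connection.* key names are assumptions consistent with the descriptions and should be verified against the generated documentation.

    # Messaging connection -- illustrative sketch only, key names assumed
    # [scheme://]host[:port][/queue]; scmps:// enables SSL encryption
    connection.server = scmp://10.0.1.4:18180/production
    # Keep the username short, some setups limit it to 10 characters
    connection.username = scproc
    connection.timeout = 3
    connection.primaryGroup = LOCATION
    # Message encoding for sending: binary, json or xml
    connection.contentType = binary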
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Configure global parameters of a station for modules. + + + + + The sensor location code of the preferred stream used e.g. + by scautopick and scrttv. + + + + + The channel code of the preferred stream used by + e.g. scautopick and scrttv. If no component code is given, + the vertical component will be fetched from inventory + considering the channel orientation (azimuth, dip). + If that approach fails, 'Z' will be appended and used as + fallback. Do not use regular expressions! + + + + + Define general parameters for amplitudes of a certain type. + Create amplitude type profiles to define the time windows, + minimum signal-to-noise ratio, amplitude thresholds and + restitution for measuring amplitudes of a certain type. + + + + Define the saturation threshold for the optional saturation + check. + By default, the saturation check is disabled but giving a + value above 0 will enable it. Waveforms that are saturated + are not used for amplitude calculations. + + This value can either be an absolute value of counts such + as "100000" counts or a relative value + (optionally in percent) with respect to the number of + effective bits, e.g. "0.8@23" or "80%@23". + The first version uses 1**23 * 0.8 whereas the latter + uses 1**23 * 80/100. + + The boolean value "false" explicitly disables + the check. + + + + + Define if amplitude calculation is enabled. If disabled, + this station will be skipped for amplitudes and magnitudes. + + + + + Activate deconvolution for this station. If no responses are + configured, an error is raised and the data is not processed. + This flag will be overridden by the type specific flag + (see Amplitude type profile). + + + + + Several parameters if usage of full responses is enabled. + The parameters of this group will be overridden by type + specific settings if given (see Amplitude type profile). + + + + Define the length of the taper at either side of the + waveform. The length will be added to the data + request: start - taper and end + taper. + + + + + The minimum frequency of the considered spectrum. + + After data is converted into the frequency domain that + minimum frequency defines the end of the left-side cosine + taper applied to the spectrum. The taper applies from + 0 Hz to {minFreq} Hz. A value of 0 or lower disables + that taper. + + + + + The maximum frequency of the considered spectrum. + + After data are converted in to the frequency domain that + maximum frequency defines the start of the right-side + cosine taper applied to the spectrum. The taper applies from + {maxFreq} Hz to {fNyquist} Hz. + A value of 0 or lower disables that taper. + + + + + + Allow to configure the Wood-Anderson seismometer response. + The default values are according to the version of + Gutenberg (1935). The newer version by Uhrhammer and + Collins (1990) is part of the IASPEI Magnitude + Working Group recommendations of 2011 September 9. This + version uses gain=2800, T0=0.8 s, h=0.8. + + + + The gain of the Wood-Anderson response. + + + + + The eigen period of the Wood-Anderson seismometer. + + + + + The damping constant of the Wood-Anderson seismometer. + + + + + + An amplitude profile configures global parameters for a + particular amplitude type. The available amplitude types + are not fixed and can be extended by plugins. 
The name of + the type must match the one defined in the corresponding + AmplitudeProcessor. + + + + Define the saturation threshold for the optional + saturation check. By default, the saturation check is + disabled but giving a value above 0 will enable it. + Waveforms that are saturated are not used for amplitude + calculations. + + This value can either be an absolute value of counts such + as "100000" counts or a relative value + (optionally in percent) with respect to the number of + effective bits, e.g. "0.8@23" or "80%@23". + The first version uses 1**23 * 0.8 whereas the latter + uses 1**23 * 80/100. + + The boolean value "false" explicitly disables + the check. + + + + + Define if amplitude calculation of certain type is enabled. + + + + + Activate deconvolution for this amplitude type. If not + set, the global flag "amplitudes.enableResponses" + will be used instead. + + + + + Define the mininum SNR to be reached to compute the + amplitudes. This value is amplitude type specific and + has no global default value. + + + + + Override the default time (relative to the trigger + time) of the begin of the noise window used to compute + the noise offset and noise amplitude. Each amplitude + processor sets its own noise time window and this option + should only be changed if you know what you are doing. + + + + + Override the default time (relative to the trigger + time) of the end of the noise window used to compute + the noise offset and noise amplitude. Each amplitude + processor sets its own noise time window and this option + should only be changed if you know what you are doing. + + + + + Override the default time (relative to the trigger + time) of the begin of the signal window used to compute + the final amplitude. Each amplitude processor sets its + own signal time window and this option should only be + changed if you know what you are doing. + + + + + Override the default time (relative to the trigger + time) of the end of the signal window used to compute + the final amplitude. Each amplitude processor sets its + own signal time window and this option should only be + changed if you know what you are doing. + + + + + The minimum distance required to compute an amplitude. + This settings has no effect with e.g. scautopick as there + is no information about the source of the event to compute + the distance. The default value is implementation + specific. + + + + + The maximum distance allowed to compute an amplitude. + This settings has no effect with e.g. scautopick as there + is no information about the source of the event to compute + the distance. The default value is implementation + specific. + + + + + The minimum depth required to compute an amplitude. + This settings has no effect with e.g. scautopick as there + is no information about the source of the event to + retrieve the depth. The default value is implementation + specific. + + + + + The maximum depth allowed to compute an amplitude. + This settings has no effect with e.g. scautopick as there + is no information about the source of the event to + retrieve the depth. The default value is implementation + specific. + + + + + Control if the amplitude calculation should be + regionalized or not. The regions and their configuration + are taken from the corresponding magnitude profiles. If + regionalization is activate, then e.g. modules without + origin information will not be able to compute an + amplitude as the origin is required to determine the + effective settings. 
+ + If amplitudes for this particular type shall be computed + regardless of any defined regions, set this parameter to + false. + + + + + Several parameters if usage of full responses is enabled. + + + + Define the length of the taper at either side of the + waveform. The length will be added to the data + request: start - taper and end + taper. + + + + + After data are converted in to the frequency domain + that minimum frequency defines the end of the left-side + cosine taper for the frequency spectrum. The taper + applies from 0 Hz to {minFreq} Hz. + A value of 0 or lower disables that taper. + + + + + After data are converted in to the frequency domain + that maximum frequency defines the start of the right-side + cosine taper for the frequency spectrum. The taper + applies from {maxFreq} Hz to {fNyquist} Hz. + A value of 0 or lower disables that taper. + + + + + + + + Define magnitude parameters independent of amplitude-type profiles. + For magnitude correction parameters, e.g, network of station + corrections, create a magnitude type profile. + + + + A magnitude profile configures global parameters for a + particular magnitude type. The available magnitude types + are not fixed and can be extended by plugins. The name of + the type must match the one defined in the corresponding + MagnitudeProcessor. + + + + Part of the magnitude station correction. The final + magnitude value is multiplier*M+offset. This value + can be regionalized with name-value pairs. + The name is the name of the region. Without + a name the value is the default without regionalization. + Example: "1.0, regionA: 0.9, regionB: 1.1". + + + + + Part of the magnitude station correction. The final + magnitude value is multiplier*M+offset. This value + can be regionalized with name-value pairs. + The name is the name of the region. Without + a name the value is the default without regionalization. + Example: "0.0, regionA: -0.1, regionB: 0.2". + + + + + + + Parameters controlling the second-stage picker for re-picking P + phases after an initial detection. + + + + AIC picker is an implementation using the simple non-AR + algorithm of Maeda (1985), + see paper of Zhang et al. (2003) in BSSA. The picker + interface name to be used in configuration files, e.g. of + scautopick, is "AIC". + + + + Override the relative data acquisition time (relative to + the triggering pick). This adds a margin to the actual + processing and is useful to initialize the filter (Example: + bandpass). The data is not used at all until signalBegin + is reached. The data time window start is the minimum of + noiseBegin and signalBegin. + + + + + Override the default time (relative to the trigger + time) of the begin of the signal window used to pick. + + + + + Override the default time (relative to the trigger + time) of the begin of the signal window used to pick. + + + + + Override the default filter which is "raw". The typical + filter grammar can be used. + + + + + Mininum SNR as returned from AIC for accepting picks. + + + + + + Bkpicker is an implementation of the Baer/Kradolfer picker + adapted to SeisComP. It was created by converting Manfred + Baers from Fortran to C++ and inserting it as a replacement + for the picker algorithm. The picker interface name to be + used in configuration files, e.g. of scautopick,is + "BK". + + + + Override the relative data acquisition time (relative to + the triggering pick). This adds a margin to the actual + processing and is useful to initialize the filter (Example: + bandpass). 
The data is not used at all until signalBegin + is reached. The data time window start is the minimum of + noiseBegin and signalBegin. + + + + + Override the default time (relative to the trigger + time) of the begin of the signal window used to pick. + + + + + Override the default time (relative to the trigger + time) of the begin of the signal window used to pick. + + + + + BP (Bandpass) is currently the only option. + + + + Number of poles. + + + + Bandpass lower cutoff freq. in Hz. + + + + + Bandpass upper cutoff freq. in Hz. + + + + + Threshold to trigger for pick (c.f. paper), default 10 + + + + + Threshold for updating sigma (c.f. paper), default 20 + + + + + + + Parameters controlling the secondary picker. Example: for picking S + phases. + + + + L2 is an algorithm to pick S-phases based on existing + P-phases. It works by creating the L2 norm of the two filtered + horizontal components and then running the `detecFilter` on + the L2 trace to find the S pick. Finally AIC is applied + around the detected pick time to refine the detection. + The picker name to be used in configuration files is + "S-L2". + + + + Override the relative data processing start time + (relative to the triggering pick). This adds a margin to + the actual processing and is useful to initialize the + filter (Example: bandpass). The data is not used at all + until signalBegin is reached. The data time window start + is the minimum of noiseBegin and signalBegin. + + + + + Override the relative start time (relative to the + triggering pick) of the begin of the signal processing. + + + + + Override the relative end time (relative to the triggering + pick) of the end of the signal window used to pick. + + + + + Configure the filter used to process the horizontal + components traces before computing the L2-norm. + + + + + Configure the detector filter applied on the filtered + L2 trace. + + + + + The detector threshold that triggers the AIC picker. + + + + + The time correction added to the detection + time before AIC time window is computed. + + + + + The AIC time window around the detection used to pick. + If 0, AIC is not used. + + + + + Minimum SNR as returned from AIC for accepting picks. + + + + + + This S-picker works similarly to L2 but it only uses the + vertical component to pick S-phases. The picker name to use + is configuration files is "S-V" + + + + Override the relative data processing start time + (relative to the triggering pick). This adds a margin to + the actual processing and is useful to initialize the + filter (Example: bandpass). The data is not used at all + until signalBegin is reached. The data time window start + is the minimum of noiseBegin and signalBegin. + + + + + Override the relative start time (relative to the + triggering pick) of the begin of the signal processing. + + + + + Override the relative end time (relative to the triggering + pick) of the end of the signal window used to pick. + + + + + Configure the filter used to process the raw vertical + component data. + + + + + Configure the detector filter applied on filtered data. + + + + + The detector threshold that triggers the AIC picker. + + + + + The time correction added to the detection time + before AIC time window is computed. + + + + + The AIC time window around the detection used to pick. + If 0, AIC is not used. + + + + + Define the mininum SNR as returned from AIC. 
+ + + + + + + + diff --git a/etc/descriptions/global_dfx.xml b/etc/descriptions/global_dfx.xml new file mode 100644 index 0000000..67e4955 --- /dev/null +++ b/etc/descriptions/global_dfx.xml @@ -0,0 +1,81 @@ + + + + global + + Implementation of the CTBTO/IDC polarization analysis for + three-component stations. + + + + + Implementation of the CTBTO/IDC polarization analysis for + three-component stations. + + + + + Feature extraction types, e.g. for scautopick. + + + + Three component feature extraction algorithm ported from the + automatic processing system implemented at CTBTO/IDC. The + documentation of the methods is not publically available. + In case of questions and comments, please contact CTBTO by + email: support@ctbto.org. + + + + Butterworth filter options of the bandpass. + + + + The order of the filter. + + + + + The lower cut-off frequency. A negative value or zero + will disable a lower cut-off frequency. + + + + + The upper cut-off frequency. A negative value or zero + will disable a upper cut-off frequency. + + + + + + + Length of the covariance window in seconds. + + + + + The fraction of the covariance window to overlap. + + + + + A station-dependent parameter used to compute a polarization slowness estimate. + + + + + A station-dependent parameter used to compute the back azimuth and slowness errors. + + + + + A station-dependent parameter used to compute the back azimuth and slowness errors. + + + + + + + + diff --git a/etc/descriptions/global_fixedhypocenter.xml b/etc/descriptions/global_fixedhypocenter.xml new file mode 100644 index 0000000..b010bce --- /dev/null +++ b/etc/descriptions/global_fixedhypocenter.xml @@ -0,0 +1,48 @@ + + + + global + Locator for re-computing source time with fixed hypocenter + + + + Locator parameters: FixedHypocenter + + + + Defines a list of available travel time tables. Each item + is a tuple separated by a slash with format "[interface]/[model]". + Built-in interfaces are "LOCSAT" and "libtau". + Other interfaces might be added via plugins. Please check their + documentation for the required interface name. + + + + + Whether to use pick time uncertainties rather than a fixed + time error. If true, then the uncertainties are retrieved from + each individual pick object. If they are not defined, then the + default pick time uncertainty as defined by defaultTimeError + will be used instead. + + + + + The default pick time uncertainty if pick uncertainties are + not going to be used or if they are absent. + + + + + Number of degrees of freedom used for error estimate. + + + + + Confidence level between 0.5 and 1. + + + + + + diff --git a/etc/descriptions/global_gui.xml b/etc/descriptions/global_gui.xml new file mode 100644 index 0000000..921f364 --- /dev/null +++ b/etc/descriptions/global_gui.xml @@ -0,0 +1,1653 @@ + + + + global + + The GUI configuration plugin extends the configuration of + graphical user interfaces to various options to adjust the look + and feel. + + + + + Configures the target messaging groups for various object types. + These parameters should only be touched if you know what you + are doing. + + + + Defines the target messaging group for manual picks, e.g. + made in scolv. + + + + + Defines the target messaging group for amplitudes, e.g. + computed in scolv. + + + + + Defines the target messaging group for magnitudes. scolv does + not use this group but sends magnitudes together with the + origin to the origin group. + + + + + Defines the target messaging group for origins created in + e.g. scolv. 
+ + + + + Defines the target messaging group for focal mechanisms created in + e.g. scolv. + + + + + Defines the target messaging group for events and event + journal entries. + + + + + + + Specified the location and the structure of the map tiles to be used. This + path is composed of zero or more directives and must include at least one + conversion specification which starts with is introduced by the character % + followed by a conversion specifier. Valid specifiers are s (replaced by + tile ID), l (tile level), c (tile column) and r (tile row). An example for + using the OpenStreetMap file structure is /path/to/maps/%l/%c/%r.png. + + + + + Projection of the map tiles. Supported formats are: rectangular and mercator. + + + + + Cache size of the map tiles. If 0 is specified a default + cache size of 32mb is used. The higher the cache size + the better the performance in higher resolutions. A + higher cache size causes less image loader calls but + requires more client memory. + + + + + Used to distinguish tile store implementations provided by plug-ins. + + + + + Allows to add custom layers that are included via plugins. + This is a list of layer names. A plugin must implement + the layer interface and register itself with the + name used in this list. The order of layers is the + default order. The custom layers are prepended to + the maps defaults layers such as the grid and the cities. + + + + + Defines the order of all configured layers. This + includes the standard layers (grid, cities) as + well as custom layers. The name of the grid layer + is "grid" and the name of the cities + layer is "cities". + + + + + + Configuration options for the events layer that + shows all events on the map that are loaded in the + event list. + + + + Show the events layer on maps. Currently only supported + by scolv. + + + + + + Configuration for the fep layer showing the polygons + of FEP (FLinn-Engdahl-Polygon) files on maps if they + exist in @DATADIR@/fep or @CONFIGDIR@/fep. + + + + Show the fep layer on maps. + + + + + + + Maximum number of cities to be rendered. If + cityPopulationWeight is less or equal than 0 then + all cities are rendered ordered by population count, + highest first. + To show the N most populated places in the visible + map region, set + "scheme.map.cityPopulationWeight" to 0 + and set this parameter to N. + + + + + + + Zoom sensitivity of the map + + + + + + This group defines various options for color, pen, brush, font, etc. for SeisComP + graphical user interfaces. There are various conventions to + define colors, fonts and gradients. + + Colors are defined in HTML + convention, e.g. as rgb values, hexadecimal numbers or color + keyword names defined by W3C. If rgb or rgba is used, it must + be quoted because the comma is handled as list separator by + the configuration. + Examples: "rgb(255,0,0)", FF00FF40, green. + + Gradients are configured as lists of tuples where each tuple + is colon separated in the form value:color. Value is either + int or double and color is again a color definition. + Example: 0:yellow,10:red + + + Show menu bar. + + + + Show status bar. + + + + + Set position if tab bar. An unset value lets the application + decide where to place the tab bar. This option might not be + supported by all applications. Valid positions are: off, north, + south, east, west + + + + + + + The station symbol size (e.g. in scmv). + + + + + The origin symbol minimum size. The formula to compute the + size of the origin symbol is: 4.9*(M-1.2). 
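For orientation, the symbol-size formula quoted above, 4.9*(M-1.2), yields roughly the following values; results falling below the configured minimum size are presumably drawn at that minimum.

    M = 3.0  ->  4.9 * (3.0 - 1.2) ≈  8.8
    M = 5.0  ->  4.9 * (5.0 - 1.2) ≈ 18.6
    M = 7.0  ->  4.9 * (7.0 - 1.2) ≈ 28.4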
+ + + + + Apply antialiasing to map layers. + This improves the visual quality but decreases performance. + + + + + Apply bilinear filtering to maps. + The bilinear filter improves the visual quality but decreases + performance slightly. It is only used for static map images. + Not while dragging. + + + + + Display the latitude/longitude grid on maps + + + + + Show cities defined in "citiesXML" on maps + + + + + Controls at which zoom level a city will be visible. The + following formula is used: + screen_width (km) * weight >= population + + + + + Show custom layers on maps + + + + + Show map legends initially. Some applications provide + controls to toggle the visibility in addition to this + option. + + + + + SeisComP ships with the rectangular projection built-in. + Other projections may be provided through plugins. + + + + + Converts map colors from RGB color scheme to BGR. + + + + + Minimum screen distance to plot a polygon or polyline line segment. + + + + + + Colors can be configured as hexadecimal numbers or color keyword names + defined by W3C. + + + A general application background color. Can be used to give + each application a different background color. An unset value + lets Qt decide. + + + + + Sets desired colors for particular agencyIDs. It depends on the + applications if they honor this setting or not. The event list + will render the agencyID string with the defined colors. + This is a list of tuples (AGENCY:COLOR), + e.g. "GEOFON:black, USGS:blue". + + + + + + + The color of lines in the map (e.g. lines + connecting the origin and a station). + + + + + The color of station outlines in the map. + + + + + Defines the pen of the latitude/longitude grid of the map. + + + The color of the pen. + + + The style of the pen. Supported values are: NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + The width of the pen. + + + + + The color of station annotations. + + + + + The color of city labels. + + + + + The color of city outlines. + + + + + The color of a capital. + + + + + The color of a "normal" city. + + + + + + Appearance of map annotations including a text + color (pen), a border color (pen) and a background + color (brush). + + + + Text pen for non highlighted annotations. + + + The color of the pen. + + + + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + + The width of the pen. + + + + + Border pen for non highlighted annotations. + + + The color of the pen. + + + + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + + The width of the pen. + + + + + Background brush for non highlighted annotations. + + + The color of the brush. + + + + The style of the brush. Supported values are, e.g.: + solid, dense1, dense7, horizontal, vertical, + cross, bdiag, fdiag, diagcross. + + + + + + Text pen for highlighted annotations. + + + The color of the pen. + + + + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + + The width of the pen. + + + + + Border pen for highlighted annotations. + + + The color of the pen. + + + + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + + The width of the pen. + + + + + Background brush for highlighted annotations. + + + The color of the brush. + + + + The style of the brush. 
Supported values are, e.g.: + solid, dense1, dense7, horizontal, vertical, + cross, bdiag, fdiag, diagcross. + + + + + Font point size of the label text. + + + + + + + The general color of records/traces. + + + A general trace color of the alternate trace (eg scheli). + + + The general background color of records/traces. + + + A general background color of the alternate trace. + + + The trace color used on top of a spectrogram. + + + The color of data gaps in trace views. + + + The color of data overlaps in trace views. + + + The color of the alignment marker in trace views. + + + Properties of record borders + + Standard properties + + + Defines the pen of the border line. + + + The color of the pen. + + + + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + + The width of the pen. + + + + + Defines the brush of the enlcosed area. + + + The color of the pen. + + + + The style of the brush. Supported values are, e.g.: + solid, dense1, dense7, horizontal, vertical, + cross, bdiag, fdiag, diagcross. + + + + + + Properties for records with valid signatures + + + Defines the pen of the border line. + + + The color of the pen. + + + + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + + The width of the pen. + + + + + Defines the brush of the enlcosed area. + + + The color of the pen. + + + + The style of the pen. Supported values are, e.g.: + solid, dense1, dense7, horizontal, vertical, + cross, bdiag, fdiag, diagcross. + + + + + + Properties for records with invalid signatures + + + Defines the pen of the border line. + + + The color of the pen. + + + + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + + The width of the pen. + + + + + Defines the brush of the enlcosed area. + + + The color of the pen. + + + + The style of the pen. Supported values are, e.g.: + solid, dense1, dense7, horizontal, vertical, + cross, bdiag, fdiag, diagcross. + + + + + + + + Defines the pen of the record offset line. + + + The color of the pen. + + + The style of the pen. Supported values are: NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + The width of the pen. + + + + + Defines the pen of the record grid. + + + The color of the pen. + + + The style of the pen. Supported values are: NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + The width of the pen. + + + + + Defines the pen of the secondary record grid. + + + The color of the pen. + + + The style of the pen. Supported values are: NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + The width of the pen. + + + + + Defines the background color of records depending on their state. + + + + Additional data which was not requested. + + + + + Requested data Background color of requested data. + + + + + Data currently loading. + + + + + Data which was requested but is not available. + + + + + + + + The color of manual picks. + + + The color of automatic picks. + + + The color of picks with undefined state. + + + The color of disabled picks. + + + + + + + The color of manual arrivals (arrivals that bind manual picks, + e.g. residual plot of scolv, manual picker, ...) + + + + The color of automatic arrivals, + + + The color of theoretical arrivals. + + + The color of arrivals binding picks with undefined state. + + + The color of disabled arrivals. + + + + The gradient of arrivals residuals. 
A gradient is defined as + a list of tuples separated by colon where the first item is + the value and the second is the color. Colors can be given in + rgb notation or hexadecimal. When rgb is used double quotes are needed to + protect the comma inside the rgb definition, e.g. + -8:"rgb(0,0,100)", -4:"rgb(0,0,255)", -3:"rgb(100,100,255)", ... + + + + + + + The color of active magnitudes. + + + The color of inactive magnitudes. + + + The color of disabled magnitudes. + + + The gradient of magnitude residuals. + + + + + + The color of the station name. + + + The color of associated stations (e.g. in scmv). + + + The color of triggered stations. + + + + + + + + + + + + The color of disabled stations. + + + The color of idle stations. + + + + + + The color of QC.delay thresholds in scmv. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The color of ground motion amplitudes in scmv. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The color of the selected zoom area (e.g. manual picker). + + + + + + + + The map legend background color. + + + + + The map legend border color. + + + + + The map legend text color. + + + + + The map legend header color. + + + + + + + + + The depth gradient. + + + + Setting this parameter to true will not interpolate between + the depth steps and the color for a depth <= input is used. + + + + + + + + The origin status colors (e.g. in event list). + + + + + + + + + + + + Defines colors used in the splash screen shown at application startup. + + + + Text color of the message string. + + + + + Text color of the version string. + + + + + + + + + The line width of the marker (e.g. picks of manual picker). + + + + + + + + The line width of the records / traces. + + + + + Configures antialiasing of records / traces. Antialiasing + needs more two times to storage space as non antialiasing + but it improves visual quality. + + + + + Configures optimization of trace polylines. If activated + then lines on the same pixel line or same pixel row + collapse into single lines. + + + + + + Mode for drawing record borders as box or line on top or + bottom. + + Supported values: "topline","box","bottomline" + + + + + + + + + The general base font of an application. This overrides + the default Qt4 application font. + + + + Sets the family name of the font. The name is case insensitive and may include a foundry name. + + + + + Defines the point size of the font + + + + + + + + + + The smallest available font. If undefined the point size is 2 points smaller than the base font. + + + + Sets the family name of the font. The name is case insensitive and may include a foundry name. + + + + + Defines the point size of the font + + + + + + + + + + The default text font. If undefined the point size is 2 points larger than the base font. + + + + Sets the family name of the font. The name is case insensitive and may include a foundry name. + + + + + Defines the point size of the font + + + + + + + + + + The largest text font. If undefined the point size is 6 points larger than the base font. + + + + Sets the family name of the font. The name is case insensitive and may include a foundry name. + + + + + Defines the point size of the font + + + + + + + + + + Font used to highlight text. If undefined it equals the normal font except for a bold font face. + + + + Sets the family name of the font. The name is case insensitive and may include a foundry name. 
+ + + + + Defines the point size of the font + + + + + + + + + + The largest heading font. If undefined it uses a bold font face and a font size twice as large as the normal font. + + + + Sets the family name of the font. The name is case insensitive and may include a foundry name. + + + + + Defines the point size of the font + + + + + + + + + + The second largest heading font. If undefined it uses a bold font face and a font size twice as large as the base font. + + + + Sets the family name of the font. The name is case insensitive and may include a foundry name. + + + + + Defines the point size of the font + + + + + + + + + + The smallest heading font. If undefined it uses a bold font face and a font size 4 points larger than the base font. + + + + Sets the family name of the font. The name is case insensitive and may include a foundry name. + + + + + Defines the point size of the font + + + + + + + + + + Font used for city labels. If undefined it equals the base font. + + + + Sets the family name of the font. The name is case insensitive and may include a foundry name. + + + + + Defines the point size of the font + + + + + + + + + + Font used for version string in the splash dialog shown at application startup. If undefined it equals the base font with a bold font face and a font size of 12. + + + + Sets the family name of the font. The name is case insensitive and may include a foundry name. + + + + + Defines the point size of the font + + + + + + + + + + Font used for the message text in the splash dialog shown at application startup. If undefined it equals the base font with a font size of 12. + + + + Sets the family name of the font. The name is case insensitive and may include a foundry name. + + + + + Defines the point size of the font + + + + + + + + + + + + + The precision of depth values. + + + + + The precision of distance values. + + + + + The precision of lat/lon values. + + + + + The precision of magnitude values. + + + + + The precision of origin times. + + + + + The precision of pick times. + + + + + Precision of displayed offset/amp in all trace widgets. + + + + + Precision of RMS values. + + + + + Precision of uncertainty values, e.g. latitude errors. + + + + + + + Display distances in km? + + + + + + + Display times in localtime or UTC (default). + + + + + + Defines the appearance of the splash screen shown at application startup. + + + + Position of the message text. + + + + Qt::Alignment bit mask. Default: AlignHCenter | AlignBottom + + + + + Position in screen coordinates. + + + + Horizontal position. + + + + + Vertical position. + + + + + + + Position of the version string + + + + Qt::Alignment bit mask. Default: AlignRight | AlignBottom + + + + + Position in screen coordinates. + + + + Horizontal position. + + + + + Vertical position. + + + + + + + + + + Defines maximum age of events to load. The value of all parameters are aggregated. + + + + Age in days. + + + + + Age in hours. + + + + + Age in minutes. + + + + + Age in seconds. + + + + + + + Configuration of special applications modes. + + + + Defines if application interaction is allowed. + + + + + Defines if the application should be launched in fullscreen + mode hiding title bar, main menu and status bar. + + + + + + + Configures the default filters selectable in manual picker. + The entry with a leading "@" is selected as default filter. + + + + + + Control the Events tab, e.g. in scolv, showing the list of loaded events. + + + + Configure the columns of the event list that are visible initially. 
+ The first column containing the origin time is always visible and cannot + be hidden. Possible values are: Type, M, TP, Phases, Lat, Lon, Depth, DType, Stat, AzGap, FM, Origins, Agency, Author, Region, ID. + + + + + Custom column showing origin or event comments. + + + + Name of the custom column to be shown in the column + header. + + + + + ID of the origin comment to look up. + + + + + ID of the event comment to look up. + + + + + Position of the column. If the configured position is less than 0 or if it + exceeds the total number of columns then the column is appended to the right. + + + + + Default value to display if the specified origin or event comment id was not found. + + + + + Mapping of comment values to colors used as text color. E.g. "foo:#000,bar:red". + + + + + + Custom columns showing parameters extracted by scripts e.g. + from origins or events. + + + + Name of custom column profiles to be registered. Comma separated list. + + + + + Definition of custom column profiles for creating custom + event list columns whose values are filled by external scripts. + The scolv documentation provides an example script. + + + + + External script to invoke for each event list entry. The object represented by the + list entry is serialized to XML and passed to the script on stdin. If the return + code of the script is 0 ('success') then the script result is read from stdout and + displayed in the corresponding event list cell. + + + + + Position of the column. If the configured position is less than 0 or if it + exceeds the total number of columns then the column is appended to the right. + + + + + Column name shown in header of event list table. + + + + + Object types this script should be invoked for. + Supported values are 'Event' and 'Origin'. + + + + + + + + + Filter the loaded event list by agency ID. + + + + Defines the text of the option "Show only own events". + Use double quotes '"' around the string if it contains spaces. + + + + + Sets a list of preferred agencies. Events from preferred + agencies are defined as "own" events. + + + + + Sets the type of the filter. If type is "events" the agency + of the preferred origin of the event is checked. + If type is "origins" the agency of all origins of + an event is checked and if at least one origins agency is part + of the whitelist it will pass the filter. Or in other words, + the event is hidden if no origin is from a preferred agency. + + + + + Sets the default state of the "Show only own events" + option. + + + + + + Filter the loaded event list by region within or outside a + bounding box. Register the profile name in the regions parameter. + + + + Add the defined region profiles separated by comma. + The order determines the ocurrence in the filter menu. + + + + + Hide events by the defined region by default. + + + + + + Define a rectangular region to filter the loaded event list. + + + + Defines the name of the region that shows up in the + listbox. + + + + + Defines a rectangular region with a list of 4 values: + latmin, lonmin, latmax, lonmax. + + + + + Defines the name of the polygon for the region check. + If defined then the rect region has no effect. + The configured polygon name is being search for + in the global FEP regions and the spatial + vector layer. The first polygon found with + the given name will be taken. + + + + + + + + Filter the loaded event list by event types. + + + + Defines the text of the option "Hide other/fake events". + Use double quotes '"' around the string if it contains spaces. 
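The script-based custom column described above receives the serialized object (event or origin) as XML on stdin and, if the script exits with code 0, the text it prints on stdout is shown in the event-list cell. A minimal sketch of such a script follows; the element names it looks up are placeholders, not a statement about the actual SCML schema.

    #!/usr/bin/env seiscomp-python
    # Sketch of an event-list column script: XML in on stdin, cell text out on
    # stdout, exit code 0 on success (anything else leaves the cell unfilled).
    import sys
    import xml.etree.ElementTree as ET

    def local_name(tag):
        """Strip a possible '{namespace}' prefix from an element tag."""
        return tag.rsplit("}", 1)[-1]

    def main():
        try:
            root = ET.fromstring(sys.stdin.read())
        except ET.ParseError:
            return 1

        # Placeholder lookup: print the first <depth><value> text found anywhere
        # in the document (illustrative element names).
        for elem in root.iter():
            if local_name(elem.tag) != "depth":
                continue
            for child in elem:
                if local_name(child.tag) == "value" and child.text:
                    print(child.text.strip())
                    return 0
        return 1

    if __name__ == "__main__":
        sys.exit(main())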
+ + + + + List of event type to be hidden if the "Hide other/fake events" + option is ticked. + Use double quotes '"' around the string if it contains spaces. + + + + + Sets the default state of the "Hide other/fake events" + option. + + + + + + Pre-set options to filter a database request to load events + into the event list. + + + + Minimum latitude + + + + + Maximum latitude + + + + + Minimum longitude + + + + + Maximum longitude + + + + + Minimum depth + + + + + Maximum depth + + + + + Minimum magnitude + + + + + Maximum magnitude + + + + + + + + Control the Event tab, e.g. in scolv, showing origins and + focal mechanisms of a selected event. + + + + + Configure the columns of the event edit origin table + that are visible initially. + Possible values are: Phases, Lat, Lon, Depth, DType, RMS, Stat, + Method, Agency, Author, Region + + + + + Custom column showing origin comments. + + + + Name of the custom column to be shown in the column + header. + + + + + ID of the origin comment to look up. + + + + + Position of the column. If the configured position is less than 0 or if it + exceeds the total number of columns then the column is appended to the right. + + + + + Default value to display if the specified origin or event comment id was not found. + + + + + Mapping of comment values to colors used as text color. E.g. "foo:#000,bar:red". + + + + + + + + Configure the columns of the event edit focal mechanism + tab that are visible initially. + Possible values are: Depth, M, Count, Misfit, STDR, + AzGap, Stat, DC, CLVD, ISO, S1, + D1, R1, S2, D2, R2, Agency, Author + + + + + + + Parameters controlling the event summary view used e.g. in scolv. + + + + + Set an alert for every event comment that ID matches + the specified regular expression, e.g. "alert_.*". + + + + + List of comments to ignore, e.g. "nil". + + + + + Discrete mapping of time values in seconds to colors used as + text color in case of an active alert. E.g. "0:00FF00,900:FF0000". + + + + + The text size of the time ago label in case of an active alert. + + + + + + + diff --git a/etc/descriptions/global_homogeneous.xml b/etc/descriptions/global_homogeneous.xml new file mode 100644 index 0000000..dd2a74e --- /dev/null +++ b/etc/descriptions/global_homogeneous.xml @@ -0,0 +1,43 @@ + + + + global + Travel time table for a homogeneous velocity model + + + + + Travel-time interface for homogeneous velocity models. + + + + Parameters defining the area where the profile applies and the P/S velocities. + Once defined, the profile can then be registered in ttt.homogeneous.tables + + + + + Geographic origin of the region. Expects 2 values: latitude, longitude. + + + Radius validity of the region. + + + Min Depth validity of the region. + + + Max Depth validity of the region. + + + P wave velocity. + + + S wave velocity. + + + + + + + + diff --git a/etc/descriptions/global_hypo71.xml b/etc/descriptions/global_hypo71.xml new file mode 100644 index 0000000..25d9b11 --- /dev/null +++ b/etc/descriptions/global_hypo71.xml @@ -0,0 +1,87 @@ + + + + global + + The traditional Hypo71PC locator with SeisComP. + + + + + General Hypo71 configuration parameters. + + + + Temporary file used by Hypo71 to store calculation logs. + + + + + Temporary file to write Hypo71 input data to. + + + + + Temporary output file to read Hypo71 location data from. + + + + + Hypo71 default profile. + If no custom profile is specified, this profile will be used by the plugin when proceeding to a localization. 
+ + + + + Bash script executed when calling the Hypo71 locator plugin for locating the earthquake. + + + + + Hypo71 profile name. + Multiples names may be set. They must be separated by comma. + Each profile can have different velocity or parameters. + + + + + Profiles containing the profile-specific velocity model and the Hypo71 parameters. + + + + + Profile's velocity model name. + + + + + Profile's method. + It is generally the locator's name (Hypo71). + + + + + File containing the profile parameters. + + + + + If the depth is requested to be fixed (e.g. by ticking the option + in scolv) the plugin performs only one location starting at + specified depth but with free depth evaluation. This option + defines whether it should really fix the depth (false) or + use this fixed depth only as starting point (true). + + + + + + Custom patternID to use when generating origin publicID + + + Specifies if the given publicD should be used for generating origin publicID + + + + + diff --git a/etc/descriptions/global_iloc.xml b/etc/descriptions/global_iloc.xml new file mode 100644 index 0000000..55ac33a --- /dev/null +++ b/etc/descriptions/global_iloc.xml @@ -0,0 +1,203 @@ + + + + global + + Locator in SeisComP implemented by the plugin lociloc. + + + + + Locator parameters: iLoc + + + + iLoc profile name. + Multiples names may be set separated by comma. + Each profile can have different velocity or parameters. + + + + + Profiles containing the specific locator parameters. For + the global models, iasp91 and ak135, profiles are + automatically considered with defaults. + To adjust the profile parameters the corresponding profile + must be created. + + + + + + + + + + Name of globally applied velocity model + into which RSTT or the local model is integrated. If + unset, the name of the profile is considered instead. + + + + + Use regional seismic travel-time tables + + + + + Use regional seismic travel-time tables for Pn and Sn + + + + + Use regional seismic travel-time tables for Pg and Lg + + + + + Full path to a file containing the local velocity model. + Requires: UseLocalTT = true. Empty string or unset + disables using a local model in this profile. + + Example: + + @DATADIR@/iloc/localmodels/model.localmodel.dat. + + + + + + Perform neighbourhood algorithm + + + + + Neighbourhood Algorithm: Search radius around initial + epicentre + + + + + Neighbourhood Algorithm: Search radius around initial + depth + + + + + Neighbourhood Algorithm: Search radius around initial + origin time + + + + + Neighbourhood Algorithm: p-value for norm to compute + misfit [1,2] + + + + + Neighbourhood Algorithm: Maximum number of iterations + + + + + Neighbourhood Algorithm: Number of cells to be resampled + at each iteration + + + + + Neighbourhood Algorithm: Size of initial sample + + + + + Neighbourhood Algorithm: Size of subsequent samples + + + + + + Depth resolution: Minimum number of depth phases for depdp + + + + + Depth resolution: Maximum local distance + + + + + Depth resolution: Minimum number of local defining stations + + + + + Depth resolution: Maximum distance for using S-P travel-time differences. 
+ + + + + Depth resolution: Minimum number of defining S-P phase pairs + + + + + Depth resolution: Minimum number of defining core reflection phases + + + + + Depth resolution: Maximum depth error for crustal free-depth + + + + + Depth resolution: Maximum depth error for deep free-depth + + + + + + Linearized inversion: Account for correlated errors + + + + + Linearized inversion: Used to exclude big residuals from solution + + + + + Linearized inversion: Allow damping of model vector + + + + + Linearized inversion: Minimum number of iterations + + + + + Linearized inversion: Maximum number of iterations + + + + + Linearized inversion: Minimum number of defining phases + + + + + Linearized inversion: Do not rename phases. Deactivating + allows to rename the phases automatically for this + profile. + + + + + + + + + diff --git a/etc/descriptions/global_locext.xml b/etc/descriptions/global_locext.xml new file mode 100644 index 0000000..3da8481 --- /dev/null +++ b/etc/descriptions/global_locext.xml @@ -0,0 +1,22 @@ + + + + global + + Locator which forwards the processing to external scripts + + + + + Locator parameters: ExternalLocator + + + + A list of profiles defined as tuples of name + and path to a script separated by colon. + + + + + + diff --git a/etc/descriptions/global_locsat.xml b/etc/descriptions/global_locsat.xml new file mode 100644 index 0000000..84a0b23 --- /dev/null +++ b/etc/descriptions/global_locsat.xml @@ -0,0 +1,75 @@ + + + + global + + Locator in SeisComP for computing source time and hypocenter + coordinates from phase picks. + + + + + Locator parameters: LOCSAT + + + + Defines a list of available LOCSAT travel-time tables. + + + + + The initial depth estimate for LOCSAT. + + + + + Whether to use pick time untertainties for arrival deltim rather + than a fixed time error. If true then the uncertainties are + retrieved from each individual pick object. If they are not + defined then the default pick time uncertainty will be used + as fallback. + + + + + The default pick time uncertainty assigned to LOCSAT's arrival deltim + attribute if pick uncertainties are not going to be used or + if they are absent. A time uncertainty of 0 s may result in + errors of the SVD decomposition in LOCSAT. + + + + + Whether to forward pick backazimuth to LOCSAT or not. In an + automatic mode backazimuth measurements might be inaccurrate + and disabling their usage in LOCSAT can be controlled with + this parameter. + + + + + Whether to forward pick horizontal slowness to LOCSAT or not. + In an automatic mode slowness measurements might be + inaccurrate and disabling their usage in LOCSAT can be + controlled with this parameter. + + + + + Number of degrees of freedom. + + + + + Confidence level between 0.5 and 1.0. + + + + + Compute the confidence ellipsoid from covariance matrix in 3D. + + + + + + diff --git a/etc/descriptions/global_mb.xml b/etc/descriptions/global_mb.xml new file mode 100644 index 0000000..dc3167d --- /dev/null +++ b/etc/descriptions/global_mb.xml @@ -0,0 +1,35 @@ + + + + global + + Body wave magnitude at teleseismic distances + + + + + Body wave magnitude at teleseismic distances measured at 1 s period. + + + + + + Parameters for computing mb magnitudes from mb amplitudes. + + + + Minimum epicentral distance for computing mb. Note: According + to the IASPEI recommendations in 2013, the minimum distance + should be 20 deg. + + + + + Maximum epicentral distance for computing mb. 
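The deltim handling described for LOCSAT above amounts to a simple fallback rule. The sketch below only restates that rule in Python; the names and signature are illustrative, the actual locator is not implemented this way.

    def arrival_deltim(pick_uncertainty, use_pick_uncertainties, default_deltim):
        """Time error (s) assigned to LOCSAT's arrival 'deltim'.

        pick_uncertainty       -- the pick's own time uncertainty, or None if absent
        use_pick_uncertainties -- the boolean option described above
        default_deltim         -- fallback value; keep it > 0 s, since 0 s may
                                  cause SVD errors in LOCSAT
        """
        if use_pick_uncertainties and pick_uncertainty is not None:
            return pick_uncertainty
        return default_deltim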
+ + + + + + + diff --git a/etc/descriptions/global_mb_bb.xml b/etc/descriptions/global_mb_bb.xml new file mode 100644 index 0000000..f3b6d64 --- /dev/null +++ b/etc/descriptions/global_mb_bb.xml @@ -0,0 +1,35 @@ + + + + global + + Body wave magnitude at teleseismic distances similar to mb + + + + + The body wave magnitude at teleseismic distances similar to mb. + + + + + + Parameters for computing mB magnitudes from mB amplitudes. + + + + Minimum epicentral distance for computing mB. Note: According + to the IASPEI recommendations in 2013, the minimum distance + should be 20 deg. + + + + + Maximum epicentral distance for computing mB. + + + + + + + diff --git a/etc/descriptions/global_md.xml b/etc/descriptions/global_md.xml new file mode 100644 index 0000000..81f1c44 --- /dev/null +++ b/etc/descriptions/global_md.xml @@ -0,0 +1,115 @@ + + + + global + + Duration magnitude plugin + + + + + Duration magnitude plugin + + + + + + + Default filter type to use before processing and after deconvolution. It's possible to set : + 1 for a Wood-Anderson seismometer + 2 for a 5sec generic Seismometer + 3 for a WWSSN LP seismometer + 4 for a WSSN SP seismometer + 5 for a Generic Seismometer + 6 for a Butterworth Low pass filter + 7 for a Butterworth High pass filter + 8 for a Butterworth Band pass filter + 9 for a 1Hz eigen-frequency L4C seismometer + + + + + taper applied to the signal + + + + + signal length used to compute the duration magnitude + + + + + Butterworth filter parameter applied to the signal + + + + + Maximum depth at which duration magnitude is valid + + + + + Maximum distance between earthquake and station at which duration magnitude is valid + + + + + Signal to noise ratio below which the coda is reached + + + + + Maximum expected duration magnitude value + This is used to find how much data should be loaded for a given station by reversing the formula + + + + + FMA regional coefficient + See Hypo2000 manual + + + + + FMB regional coefficient + See Hypo2000 manual + + + + + FMD regional coefficient + See Hypo2000 manual + + + + + FMF regional coefficient + See Hypo2000 manual + + + + + FMZ regional coefficient + See Hypo2000 manual + + + + + Linear correction + + + + + Offset + + + + + Station correction + + + + + + + diff --git a/etc/descriptions/global_ml.xml b/etc/descriptions/global_ml.xml new file mode 100644 index 0000000..1f3ba9e --- /dev/null +++ b/etc/descriptions/global_ml.xml @@ -0,0 +1,115 @@ + + + + global + + Standard local (Richter) magnitude + + + + Standard local (Richter) magnitude + + + + + Regionalized calibration parameters for ML. The region itself is defined + by another magnitude-type ML profile. + + + + Add one profile for every region. The profile name + equals the name of a polygon configured in the BNA file + of the Magnitude-type profile. The Magnitude-type profile + and the polygon must exist. + The special name "world" corresponds to the + region of the entire planet as a fallback. + + + + + Overrides the calibration function log10(A0) + for computing ML per region. See logA0 + description in the bindings. + + + + + + + + + + + Standard local (Richter) magnitude + + + + + + Parameters for measuring ML amplitudes. Add more parameters + by adding an amplitude type profile 'ML', + + + + The filter applied to raw records before applying + Wood-Anderson simulation. Default: no pre-filtering. + + + + + This parameter allows to set how the amplitude is measured. 
+ Either by finding the absolute maximum of the demeaned + trace (AbsMax), the difference of maximum and minimum of + the signal window (MinMax) or the maximum peak-trough + of one cycle (PeakTrough). + + Note that if absMax is already explicitly configured, this + parameter has no effect. + + + + + Defines the combiner operation for the amplitudes measured + on either both horizontal component. The default is to + use the average. Allowed values are: "average", + "min", "max" and "geometric_mean". + "geometric_mean" corresponds to averaging single-trace + magnitudes instead of their amplitudes. + + + + + + + + Parameters for computing ML magnitudes from ML amplitudes. + + + + The calibration function log10(A0). + + Format: any list of distance-value pairs separated by + comma. Values within pairs are separated by colon. + + Example: "0:-1.3,60:-2.8,100:-3.0,400:-4.5,1000:-5.85" + specifies 4 distance intervals from + 0...60, 60...100, 100...400 and 400...1000 km distance. + Within these intervals log10(A0) is interpolated linearly + between -1.3...-2.8, -2.8...-3.0, -3.0...-4.5 and -4.5...-5.8, + respectively. + + Note: The first and last distance samples limit the + maximum distance range for computing ML. + + + + + Maximum epicentral distance for computing ML. + No distance limitation for maxDistanceKm = -1. + + + + + + + diff --git a/etc/descriptions/global_mlc.xml b/etc/descriptions/global_mlc.xml new file mode 100644 index 0000000..f385eb7 --- /dev/null +++ b/etc/descriptions/global_mlc.xml @@ -0,0 +1,293 @@ + + + + global + + Custom magnitude for local events measured on horizontal components + + + + Custom magnitude for local events measured on horizontal components + + + + + Regionalized calibration parameters for MLc. The region itself is defined + by another magnitude-type MLc profile. + + + + Add one profile for every region. The profile name + equals the name of a polygon configured in the BNA file + of the Magnitude-type profile. The Magnitude-type profile + and the polygon must exist. + The special name "world" corresponds to the + region of the entire planet as a fallback. + + + + + Parameters for A0, non-parametric magnitude calibration. + + + + Overrides the calibration function log10(A0) + for computing MLc per region. See logA0 + description in the bindings. + + + + + + Parameters for parametric magnitude calibration: + MLc = log10(A) + c3 * log10(r/c5) + c2 * (r + c4) + c1 + c0(station) + + + + Overrides the calibration parameter c0 + for computing MLc per region. See c0 + description in the bindings. + + + + + Overrides the calibration parameter c1 + for computing MLc per region. See c1 + description in the bindings. + + + + + Overrides the calibration parameter c2 + for computing MLc per region. See c2 + description in the bindings. + + + + + Overrides the calibration parameter c3 + for computing MLc per region. See c3 + description in the bindings. + + + + + Overrides the calibration parameter c4 + for computing MLc per region. See c4 + description in the bindings. + + + + + Overrides the calibration parameter c5 + for computing MLc per region. See c5 + description in the bindings. + + + + + + + + + + + + Custom magnitude for local events measured on horizontal components + + + + + + Parameters for measuring MLc amplitudes. Add more parameters + by adding an amplitude type profile 'MLc', + + + + The filter applied to raw records before applying + Wood-Anderson simulation. + + + + + Applying Wood-Anderson simulation. 
To achieve displacement + records without WA simulation, an integration filter can + be applied with the pre-filter. + + + + + Scaling value multiplied to the measured amplitudes to + match the amplitude units expected by the magnitude + calibration function. + + Expected amplitudes are + in units of mym but actual amplitudes provided from + Wood-Anderson-corrected seismograms are in units of mm: + amplitudeScale = 1000. + + If data are not corrected for WA, measured amplitudes + take the unit of gain-corrected data considering the + preFilter: + amplitudeScale converts between units of measured and + excpected amplitude. + + + + + Type for measuring amplitudes. Available: + + AbsMax: absolute maximum + + MinMax: half difference between absolute maximum and minimum + + PeakTrough: half difference between maximum and minimum + on a half cycle + + + + + Define how to combine the amplitudes measured on both + horizontals components: + + min: take the minimum + + max: take the maxium + + avgerage: form the average + + geometric_mean: form the geometric mean + + + + + + + + Parameters for computing MLc magnitudes from MLc amplitudes. + + + + Considered distance measure between source and receiver. + Possible values are + + hypocentral: hypocentral distance + + epicentral: epicentral + + + + + The minimum distance for computing magnitudes from amplitudes. + Negative values deactivate the check. + + + + + The maximum distance for computing magnitudes from amplitudes. + Negative values deactivate the check. + + + + + The maximum depth up to which magnitudes are computed. + + + + + Type of magnitude calibration formula to be considered. + The calibration parameters are considered accordingly. + Currently supported are + + "parametric": consider parameters of parametric + configuration in parametric section + + "A0": consider parameters of non-parametric + configuration in A0 section. + + + + + Parameters for A0, non-parametric magnitude calibration. + Considered if magnitude.MLc.calibrationType = "A0". + + + + The non-parametric calibration function log10(A0). + + Format: any list of distance-value pairs separated by + comma. Values within pairs are separated by colon. + + Example: "0:-1.3,60:-2.8,100:-3.0,400:-4.5,1000:-5.85" + specifies 4 distance intervals from + 0...60, 60...100, 100...400 and 400...1000 km distance. + Within these intervals log10(A0) is interpolated linearly + between -1.3...-2.8, -2.8...-3.0, -3.0...-4.5 and -4.5...-5.8, + respectively. + + Note: The first and last distance samples limit the + maximum distance range for computing MLv. + + + + + + Parameters for parametric magnitude calibration: + MLc = log10(A) + c3 * log10(r/c5) + c2 * (r + c4) + c1 + c0(station) + + Considered if magnitude.MLc.calibrationType = "parametric". + + + + Station correction. 
This is the calibration value 'c0' + applied in the magnitude calibration formula + + MLc = c0(station) + c1 + c2 * (r + c4) + c3 * log(r/c5) + log10(A) + + + + + The calibration value 'c1' applied in the magnitude + calibration formula + + MLc = c0(station) + c1 + c2 * (r + c4) + c3 * log(r/c5) + log10(A) + + + + + The calibration value 'c2' applied in the + magnitude calibration formula + + MLc = c0(station) + c1 + c2 * (r + c4) + c3 * log(r/c5) + log10(A) + + + + + The calibration value 'c3' applied in the + magnitude calibration formula + + MLc = c0(station) + c1 + c2 * (r + c4) + c3 * log(r/c5) + log10(A) + + + + + The calibration value 'c4' applied in the + magnitude calibration formula + + MLc = c0(station) + c1 + c2 * (r + c4) + c3 * log(r/c5) + log10(A) + + + + + The calibration value 'c4' applied in the + magnitude calibration formula + + MLc = c0(station) + c1 + c2 * (r + c4) + c3 * log(r/c5) + log10(A) + + + + + + + + diff --git a/etc/descriptions/global_mlh.xml b/etc/descriptions/global_mlh.xml new file mode 100644 index 0000000..87146bc --- /dev/null +++ b/etc/descriptions/global_mlh.xml @@ -0,0 +1,50 @@ + + + + global + + The MLh plugin (previously MLsed) is designed to compute amplitudes + and magnitudes according to the Swiss Seismological Service (SED) + standards. + + + + + The MLh plugin (previously MLsed) is designed to compute amplitudes + and magnitudes according to the Swiss Seismological Service (SED) + standards. It is a modified version of the gempa ML plugin developed + at the Liverpool developer meeting. + + + + + + + Define combiner operation for both horizontals (min, max, avg). + + + + + MLh clipping level, in raw counts, eg. 80% of 2^23 = 6710886. + + + + + + + + + Defines attenuation parameters for MLh. + Format: "UpToKilometers A B; UpToNextKilometers A B;". + Example: "30 nomag; 60 0.018 2.17; 700 0.0038 3.02". + The first parameter set "30 nomag" means that up to 30km + from the sensor the magnitude should not be calculated. + + Note: No MLh computation if params is empty. + + + + + + + diff --git a/etc/descriptions/global_mlr.xml b/etc/descriptions/global_mlr.xml new file mode 100644 index 0000000..65ff6e1 --- /dev/null +++ b/etc/descriptions/global_mlr.xml @@ -0,0 +1,33 @@ + + + + global + + The GNS/Geonet local magnitude + + + + + + + + Parameter for computing MLr magnitudes for GNS/Geonet from MLv amplitudes. + + + + Defines Stations Corrections parameters for MLr (GNS/Geonet Local magnitude). + Format: "UpToKilometers A ; UpToNextKilometers A ;". + Example: "30 nomag; 60 0.018 ; 700 0.0038 ". + The first parameter set "30 nomag" means that up to 30km + from the sensor the magnitude should not be calculated. + A is used as station correction. + "nomag" is used to disable station magnitudes. + + Note: No MLr computation if params is empty. + + + + + + + diff --git a/etc/descriptions/global_mlv.xml b/etc/descriptions/global_mlv.xml new file mode 100644 index 0000000..49f0ca3 --- /dev/null +++ b/etc/descriptions/global_mlv.xml @@ -0,0 +1,102 @@ + + + + global + + Local (Richter) magnitude measured on the vertical component + + + + + + Regional calibration parameters for MLv. The region itself is defined + by another magnitude-type MLv profile. + + + + Add one profile for every region. The profile name + equals the name of a polygon configured in the BNA file + of the Magnitude-type profile. The Magnitude-type profile + and the polygon must exist. + The special name "world" corresponds to the + region of the entire planet as a fallback. 
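The parametric MLc calibration quoted above can be written as a single function. This is a sketch of the formula only, with r in km and all calibration constants passed in explicitly; the function name and the use of log10 for the c3 term follow the bindings description, and none of this is the plugin's actual code.

    import math

    def mlc_parametric(amplitude, distance_km, c0, c1, c2, c3, c4, c5):
        """MLc = log10(A) + c3 * log10(r / c5) + c2 * (r + c4) + c1 + c0(station)

        amplitude   -- measured MLc amplitude A, already scaled to the units the
                       calibration expects
        distance_km -- r, hypocentral or epicentral distance depending on the
                       configured distance measure
        c0..c5      -- calibration parameters (c0 is the per-station correction)
        """
        return (math.log10(amplitude)
                + c3 * math.log10(distance_km / c5)
                + c2 * (distance_km + c4)
                + c1
                + c0)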
+ + + + + Overrides the calibration function log10(A0) + for computing MLv per region. See logA0 + description in the bindings. + + + + + + + + + + + Local (Richter) magnitude measured on the vertical component + + + + + + Parameters for measuring MLv amplitudes. Add more parameters + by adding an amplitude type profile 'MLv', + + + + The filter applied to raw records before applying + Wood-Anderson simulation. Default: no pre-filtering. + + + + + This parameter allows to set how the amplitude is measured. + Either by finding the absolute maximum of the demeaned + trace (AbsMax), the difference of maximum and minimum of + the signal window (MinMax) or the maximum peak-trough + of one cycle (PeakTrough). + + Note that if absMax is already explicitly configured, this + parameter has no effect. + + + + + + + + Parameters for computing MLv magnitudes from MLv amplitudes. + + + + The calibration function log10(A0). + + Format: any list of distance-value pairs separated by + comma. Values within pairs are separated by colon. + + Example: "0:-1.3,60:-2.8,100:-3.0,400:-4.5,1000:-5.85" + specifies 4 distance intervals from + 0...60, 60...100, 100...400 and 400...1000 km distance. + Within these intervals log10(A0) is interpolated linearly + between -1.3...-2.8, -2.8...-3.0, -3.0...-4.5 and -4.5...-5.8, + respectively. + + Note: The first and last distance samples limit the + maximum distance range for computing MLv. + + + + + Maximum epicentral distance for computing MLv. + No distance limitation for maxDistanceKm=-1 + + + + + + + diff --git a/etc/descriptions/global_mn.xml b/etc/descriptions/global_mn.xml new file mode 100644 index 0000000..470883b --- /dev/null +++ b/etc/descriptions/global_mn.xml @@ -0,0 +1,169 @@ + + + + global + + Nuttli magnitude for Canada and other Cratonic regions + + + + + + Amplitude control parameters for MN (Nuttli magnitude). + + + + The travel time table set compiled for LocSAT. The tables + are located in "share/locsat/tables/[vmodel].*". + + + + + + + + Regionalization of MN (Nuttli magnitude). + + + + The path to the BNA file which defines the valid region + for the MN magnitude. Note that the entire path from + source to receiver must lie entirely within the polygon(s). + + + + + + + + + + + + + + Parameters for measuring AMN amplitudes. + + + + Whether to use RMS ratio of signal and noise window for + SNR computation or the ration of the peak-trough + amplitudes of either window. + + + + + The configurable filter such that the V measurement is + made on a filtered trace. By default, filtering is not + enabled. + + See https://docs.gempa.de/seiscomp/current/base/filter-grammar.html + for how to specify the filter. + + + + + The minimum phase velocity used to determine the signal + window end. + + + + + The maximum phase velocity used to determine the signal + window start. + + + + + The length of the SNR window. + + + + + The offset of the noise window. A positive value will move + the computed noise window to the left on the time axis, + a negative value will move it to the right on the time axis. + + + + + The priority list of phase onsets to compute the signal + start window. Except for Vmin and Vmax, associated phases + (arrivals) must be present in the origin for this + particular phase. Picked phases are only considered if + the origin is a manual origin or the pick is a + manual pick. The first value in the list which can be + retrieved or computed, is selected. 
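The non-parametric log10(A0) calibration used above for ML, MLv and the MLc A0 variant is a list of distance:value pairs with linear interpolation in between, and the first and last samples bound the usable distance range. A small sketch of evaluating such a string, assuming the usual Richter relation ML = log10(A) - log10(A0); names are illustrative.

    import math

    def parse_logA0(spec):
        """Parse e.g. "0:-1.3,60:-2.8,100:-3.0,400:-4.5,1000:-5.85" into a
        sorted list of (distance_km, log10A0) pairs."""
        pairs = []
        for item in spec.split(","):
            dist, val = item.strip().split(":")
            pairs.append((float(dist), float(val)))
        return sorted(pairs)

    def log_a0(pairs, distance_km):
        """Linearly interpolated log10(A0); None outside the sampled range,
        which also limits the distance range for computing the magnitude."""
        if not pairs[0][0] <= distance_km <= pairs[-1][0]:
            return None
        for (d0, v0), (d1, v1) in zip(pairs, pairs[1:]):
            if d0 <= distance_km <= d1:
                t = (distance_km - d0) / (d1 - d0)
                return v0 + t * (v1 - v0)

    def station_ml(amplitude, distance_km, pairs):
        """Station magnitude from amplitude A and distance: log10(A) - log10(A0)."""
        a0 = log_a0(pairs, distance_km)
        if a0 is None:
            return None
        return math.log10(amplitude) - a0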
+ + Allowed tokens: Pg, Pn, P, Sg, Sn, S, Lg, Rg, Vmin, Vmax + + + + + The priority list of phase onsets to compute the signal + end window. Except for Vmin and Vmax, associated phases + (arrivals) must be present in the origin for this + particular phase. Picked phases are only considered if + the origin is a manual origin or the pick is a + manual pick. The first value in the list which can be + retrieved or computed, is selected. + + Allowed tokens: Pg, Pn, P, Sg, Sn, S, Lg, Rg, Vmin, Vmax + + + + + + + + Parameters for computing MN magnitudes from AMN amplitudes. + + + + The minimum SNR required for a magnitude to pass + the QC check. The station magnitude will be computed + anyway but if the SNR is below this threshold it will + be associated with weight zero and will not contribute + to the network magnitude. + + + + + The minimum period required for a magnitude to pass + the QC check. The station magnitude will be computed + anyway but if the period is below this threshold it will + be associated with weight zero and will not contribute + to the network magnitude. + + + + + The maximum period allowed for a magnitude to pass + the QC check. The station magnitude will be computed + anyway but if the period is above this threshold it will + be associated with weight zero and will not contribute + to the network magnitude. + + + + + The minimum distance required for a magnitude to pass + the QC check. The station magnitude will be computed + anyway but if the distance is below this threshold it + will be associated with weight zero and will not contribute + to the network magnitude. + + + + + The maximum distance allowed for a magnitude to be + computed. If the distance exceeds this threshold then + the computation will be canceled and no station + magnitude will be available at all. + + + + + + + diff --git a/etc/descriptions/global_ms_20.xml b/etc/descriptions/global_ms_20.xml new file mode 100644 index 0000000..c6f4b9d --- /dev/null +++ b/etc/descriptions/global_ms_20.xml @@ -0,0 +1,49 @@ + + + + global + + Surface wave magnitude measured at around 20 s + + + + + The surface wave magnitude measured on the vertical component + at around 20 s period. + + + + + + Parameters for computing Ms_20 magnitudes from Ms_20 amplitudes. + + + + Lower period limit of the signal for computing Ms_20. + + + + + Upper period limit of the signal for computing Ms_20. + + + + + Minimum epicentral distance for computing Ms_20. + + + + + Maximum epicentral distance for computing Ms_20. + + + + + Maximum depth for computing Ms_20. + + + + + + + diff --git a/etc/descriptions/global_nonlinloc.xml b/etc/descriptions/global_nonlinloc.xml new file mode 100644 index 0000000..1afed79 --- /dev/null +++ b/etc/descriptions/global_nonlinloc.xml @@ -0,0 +1,155 @@ + + + + global + + NonLinLoc locator wrapper plugin for SeisComP. + NonLinLoc was written by Anthony Lomax (http://alomax.free.fr/nlloc). + + + + + + PublicID creation pattern for an origin created by NonLinLoc. + + + + + + Defines the output path for all native NonLinLoc input and output files. + + + + + + Save input files *.obs in outputPath for later processing. + Setting to false reduces file i/o and saves disk space. + + + + + + Save output files in outputPath for later processing or + for viewing by the Seismicity Viewer. + Setting to false reduces file i/o and saves disk space. + + + + + + The default NonLinLoc control file to use. 
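The MN quality checks described above do not suppress the station magnitude; they only set its weight to zero so it does not contribute to the network magnitude (only the maximum-distance check cancels the computation entirely). A sketch of that weighting logic, with illustrative parameter names:

    def mn_station_weight(snr, period, distance_deg,
                          min_snr, min_period, max_period, min_distance):
        """Return the weight of an MN station magnitude after the QC checks:
        1.0 if all checks pass, 0.0 otherwise (the magnitude itself is still
        computed either way)."""
        passed = (snr >= min_snr
                  and min_period <= period <= max_period
                  and distance_deg >= min_distance)
        return 1.0 if passed else 0.0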
+ + + + + + The default pick error in seconds passed to NonLinLoc if a SeisComP pick + object does not provide pick time uncertainties. + + + + + + Since NLL does not support fixing the depth natively so this + feature is emulated by settings the Z grid very tight around + the depth to be fixed. This value sets the Z grid spacing. + + + + + + Picks from stations with missing configuration will be + ignored. The origin will be relocated without that pick + if possible. + + If set to false, the plug-in throws + an excepection without locating. + + + + + + Defines a list of active profiles to be used by the plugin. + + + + + + + Defines a regional profile that is used if a prelocation falls + inside the configured region. + + + + earthModelID that is stored in the created origin. + + + + + methodID that is stored in the created origin. + + + + + Path to travel time tables (grids). + + + + + Format of the station name used to select the right travel time table (grid) file + for a station. + By default only the station code is used (e.g. tablePath.P.@STA@.time.*), but + that doesn't allow to distinguish between multiple network codes or location codes + that use the same station code. + To overcome this limitation this parameter could be set in a more general way, for + example @NET@_@STA@_@LOC@. In this way NonLinLoc will look for + travel time table (grid) files of the form: tablePath.P.@NET@_@STA@_@LOC@.time.* + Where @NET@ @STA@ @LOC@ are just placeholder for the actual codes + + + + + Control file of the current profile. If not set, the default + control file will be used instead. + + + + + Transformation type of the configured region. Supported are + SIMPLE and GLOBAL. + + Default: GLOBAL is assumed. + + + + + Defines the 4 corner values of the epicentral region for selecting the profile. + The original epicentre must be within the region. + + If transform is GLOBAL: min_lat, min_lon, max_lat, max_lon. + The values define the geographic corner coordinates. Unit is degree. + + If transform is SIMPLE: xmin, ymin, xmax, ymax. + The values define the region relative to the configured origin. + Unit is km. + + + + + Only used for transformation SIMPLE. Expects 2 values: latitude, longitude. + The value define the geographic origin of the area spanned by region. + Unit is degree. + + + + + Only used for transformation SIMPLE. Defines the rotation around the + origin of the defined region. + + + + + + + + diff --git a/etc/descriptions/import_inv.xml b/etc/descriptions/import_inv.xml new file mode 100644 index 0000000..ad0d8dd --- /dev/null +++ b/etc/descriptions/import_inv.xml @@ -0,0 +1,49 @@ + + + + Import inventory information from various sources. + + + + + + + import_inv [FORMAT] input [output] + import_inv help [topic] + + + The first form takes the format as first parameter and the input + and output location. The input location is either a file or directory + depending on the format and its converter. If the output is not + given it defaults to seiscomp/etc/inventory/{input}.xml. To write + the output to stdout, "-" must be used. + + The second form provides help on a particular topic. The only topic + currently supported is "formats" which prints all available input + formats. 
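For the NonLinLoc profiles described above, a profile is used when the prelocation falls inside its configured region. The sketch below assumes GLOBAL regions only (min_lat, min_lon, max_lat, max_lon in degrees), picks the first matching profile in the configured order, and ignores longitude wrap-around; the real plugin also supports SIMPLE (km-based) regions.

    def select_profile(lat, lon, profiles):
        """profiles: ordered list of (name, (min_lat, min_lon, max_lat, max_lon))
        tuples standing in for the active profile list. Returns the name of the
        first profile whose region contains (lat, lon), or None so the caller
        can fall back to the default control file."""
        for name, (min_lat, min_lon, max_lat, max_lon) in profiles:
            if min_lat <= lat <= max_lat and min_lon <= lon <= max_lon:
                return name
        return None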
+ + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + diff --git a/etc/descriptions/inv2dlsv.xml b/etc/descriptions/inv2dlsv.xml new file mode 100644 index 0000000..bb6753a --- /dev/null +++ b/etc/descriptions/inv2dlsv.xml @@ -0,0 +1,16 @@ + + + + Converts SC3 inventory XML to dataless SEED. + + + inv2dlsv [in_xml [out_dataless]] + + + If in_xml is not given, stdin is used. If out_dataless is not given, + stdout is used. + + + + + diff --git a/etc/descriptions/invextr.xml b/etc/descriptions/invextr.xml new file mode 100644 index 0000000..bbeeef5 --- /dev/null +++ b/etc/descriptions/invextr.xml @@ -0,0 +1,70 @@ + + + + Extract channels from inventory. + + + + + + + invextr [OPTIONS] [input=stdin] [output=stdout] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#print-config-vars + generic#validate-schema-params + generic#dump-settings + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + verbosity#print-context + verbosity#print-component + verbosity#log-utc + verbosity#trace + + + + + + + + + + + diff --git a/etc/descriptions/kernel-messaging.xml b/etc/descriptions/kernel-messaging.xml new file mode 100644 index 0000000..2ce6475 --- /dev/null +++ b/etc/descriptions/kernel-messaging.xml @@ -0,0 +1,30 @@ + + + + kernel + + SeisComP messaging component that enables communication of modules over the network. + + + + + + Enables/disables local messaging (scmaster). + The messaging component is an integral component + of all modules (except e.g. acquisition modules). + If you are not sure what to do, enable it. + + + + + Defines the messaging unencrypted bind address. If + left empty then the configuration file (see scmaster) + will be used instead. Use this to override the + unencrypted bind address. The format is + "[ip:]port". + + + + + + diff --git a/etc/descriptions/kernel.xml b/etc/descriptions/kernel.xml new file mode 100644 index 0000000..53afc39 --- /dev/null +++ b/etc/descriptions/kernel.xml @@ -0,0 +1,14 @@ + + + + SeisComP kernel. + + + + Sends all logging output to the syslog backend which logs + usually to /var/log/messages. + + + + + diff --git a/etc/descriptions/msrtsimul.xml b/etc/descriptions/msrtsimul.xml new file mode 100644 index 0000000..9137c4e --- /dev/null +++ b/etc/descriptions/msrtsimul.xml @@ -0,0 +1,61 @@ + + + + MiniSEED real time playback and simulation + + + msrtsimul [OPTION] miniSEED-file + + + + + + + + + + + + + + + diff --git a/etc/descriptions/ql2sc.xml b/etc/descriptions/ql2sc.xml new file mode 100644 index 0000000..09ddfbb --- /dev/null +++ b/etc/descriptions/ql2sc.xml @@ -0,0 +1,186 @@ + + + + QuakeLink (gempa GmbH) to SeisComP event parameter exchange. + + + + Number of seconds to fetch missed updates on start up. + + + + + Number of public objects to cache. + + + + + Maximum number of notifiers to batch in one message. If set + to 0 no size limit is enforced. Make sure to not hit the + overall message size limited of 16MiB which is enforced by + the messaging system. 
+ + + + + If event synchronisation is enabled and an incoming origin + is not yet associated with an event on the target machine, + then this timeout defines the maximum number of seconds to + wait for an association. + + + + + Registration of the host profiles defining the connection + parameters to the QuakeLink hosts. + + + + + Definition of host profiles. For each host profile a connection + to one QuakeLink server can established. The profiles must be registered + in 'hosts' to apply them. + + + + Provide the connection parameters to one QuakeLink server. + + + + URL of the QuakeLink service, the scheme 'qls' enables SSL. + + Format: [ql[s]://][user:pwd@][host][:port]. + + If set to an empty string the application will run without any QuakeLink connection attempt. + + + + + Enable/disable GZip (GNU zip) compression. + + + + Request native data instead of XML format. + Native data export may be disabled on some hosts. + + + + Try to update the event attributes of the target event + with the attributes of the source event which includes + event type and event certainty. It will not import + events but tries to find the associated event of the + input preferred origin at the target system and will + update the event attributes via journaling. + + + + + Specify the XML components to fetch. + Note: These options are not used if 'native' data is requested. + + + Include picks + + + Include amplitudes + + + Include origin arrivals + + + Include origin station magnitudes + + + Include moment tensor station contributions and phase settings + + + Include only preferred origin and magnitude information + + + + + Request server to send keep alive message every 30s to + prevent connection reset by firewalls on long idle + periods. If activated the client will reset the + connection if no alive message is received within 60s. + + + + + SQL like WHERE clause to filter the result set. + + clause := condition[ AND|OR [(]clause[)]] __ + condition := MAG|DEPTH|LAT|LON|PHASES|OTIME|UPDATED [op float|time]|[IS [NOT] NULL] __ + op := =|>|>=|<|<=|eq|gt|ge|lt|ge __ + time := %Y,%m,%d[,%H,%M,%S,%f] + + + + + Map datamodel class names to messaging groups. For unmapped objects + the mapping of their parent objects is evaluated recursively. Objects + may be excluded by mapping them to 'NULL'. + + + + + + + + + Defines a whitelist of publicID prefixes that are + allowed for processing. Separate items by comma. + + + + + + + Defines a blacklist of publicID prefixes that are + not allowed for processing. Separate items by comma. + + + + + + + + ql2sc [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#print-component + verbosity#print-context + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#trace + verbosity#log-file + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + database#db-driver-list + database#database + + + + diff --git a/etc/descriptions/scalert.xml b/etc/descriptions/scalert.xml new file mode 100644 index 0000000..84effa7 --- /dev/null +++ b/etc/descriptions/scalert.xml @@ -0,0 +1,187 @@ + + + + Real time alert template. + + + + Treat an event as new event when it is seen the first time. + + + + + List of agency IDs to consider picks and origins. 
The agency ID + is extracted from the pick or the preferred origin of the event + and compared with the configured IDs. + + unset (=): use agencyID defined in global, default + + empty list (=""): all agencies are allowed + + + + + + The default message string for the event-script is + "earthquake, [HOURS] hours [MINS] minutes ago, [DESC], + magnitude [MAG], depth [DEP] kilometers" whereas [DESC] + is the string given in the event.description attribute. This + string can be overwritten using one of the following options. + There are three placeholders that can be used: @region@, + @dist@ and @poi@. + Example: "@region@, @dist@ kilometers from @poi@ + away". + + + + + When using the nearest point of interest (city) as part of + the message string, specify the maximum distance in degrees + from the event. Any point of interest farther away will be + ignored. + + + + + Minimum population for a city to become a point of interest. + + + + + + + The script to be called when a pick + arrives. Network-, station code, pick publicID are passed + as parameters $1, $2, $3. + + + + + The script to be called when an amplitude + arrives. Network-, station code, amplitude and amplitude + public ID are passed as parameters $1, $2, $3, $4. + + + + + The script to be called when a preliminary + origin arrives. Latitude and longitude are passed as + parameters $1 and $2. + + + + + The script to be called when an event has been + declared. The message string, a flag (1=new event, + 0=update event), the EventID, the arrival count and the + magnitude (optional when set) are passed as parameters + $1, $2, $3, $4 and $5. + + + + + Constraints for executing scripts + + + Start the pick script only when the phaseHint of the + received pick has one of the value(s). + + + + + Start the pick script only when the stream (NET.STA.LOC.CHA) + of the received pick belongs to the list of stream IDs. If empty, + all picks are accepted, otherwise only the ones whose stream ID + matches one of the entry of this comma separated list. Each entry + must follow the NET.STA.LOC.CHA format, but the special + characters ? * | ( ) are also accepeted. + E.g. "CH.*,GR.STA??.*,*.*.*.HH?,*.*.*.??(Z|1)" + + + + + Start the pick script only when a minimum number of phases + 'phaseNumber' is received within 'phaseInterval'. + + + + + Start the pick script only when a minimum number of phases + 'phaseNumber' is received within 'phaseInterval'. + + + + + + + scalert [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + + + + + + + + + diff --git a/etc/descriptions/scamp.xml b/etc/descriptions/scamp.xml new file mode 100644 index 0000000..4551fc4 --- /dev/null +++ b/etc/descriptions/scamp.xml @@ -0,0 +1,135 @@ + + + + Calculates amplitudes on basis of incoming origins and the associated picks. + + + Definition of magnitude types for which amplitudes are to be calculated. 
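The stream-ID filter for the pick script above accepts NET.STA.LOC.CHA entries with the wildcards ? and * plus | ( ) for alternation, e.g. "CH.*,GR.STA??.*,*.*.*.HH?,*.*.*.??(Z|1)". A sketch of one way to evaluate such a list against an incoming pick's stream ID — an illustrative reimplementation, not scalert's own matcher:

    import re

    def pattern_to_regex(entry):
        """Translate one list entry into a regular expression: '?' matches a
        single character, '*' any sequence, and '|', '(', ')' are kept for
        alternation; everything else is matched literally."""
        out = []
        for ch in entry:
            if ch == "?":
                out.append(".")
            elif ch == "*":
                out.append(".*")
            elif ch in "|()":
                out.append(ch)
            else:
                out.append(re.escape(ch))
        return re.compile("^(?:" + "".join(out) + ")$")

    def stream_allowed(stream_id, config_value):
        """config_value is the comma-separated list from the configuration;
        an empty value accepts every stream."""
        entries = [e.strip() for e in config_value.split(",") if e.strip()]
        if not entries:
            return True
        return any(pattern_to_regex(e).match(stream_id) for e in entries)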
+ + + + The minimum arrival weight within an origin to compute amplitudes for the associated pick. + + + Timeout in seconds of the first data packet of waveform data acquisition. + + + Timeout in seconds of any subsequent data packet of waveform data acquisition. + + + + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + records#record-driver-list + records#record-url + records#record-file + records#record-type + + + + + + + + + + Amplitudes can be re-processed, e.g. in order to consider inventory + changes. The re-processing is time-window based and triggerd by + setting start-time or end-time. Re-processing will either create + output to stdout in XML format. Using --commit will send the + updated amplitudes to the messaging system. Waveform access is + required. + + + + + + + + + diff --git a/etc/descriptions/scardac.xml b/etc/descriptions/scardac.xml new file mode 100644 index 0000000..9c51445 --- /dev/null +++ b/etc/descriptions/scardac.xml @@ -0,0 +1,99 @@ + + + + Waveform archive data availability collector. + + + + Path to MiniSeed waveform archive where all data is stored. The SDS archive + structure is defined as + YEAR/NET/STA/CHA/NET.STA.LOC.CHA.YEAR.DATEOFYEAR, e.g. + 2018/GE/APE/BHZ.D/GE.APE..BHZ.D.2018.125 + + + + + Number of threads scanning the archive in parallel. + + + + + Batch size of database transactions used when updating data + availability segments. Allowed range: [1,1000]. + + + + + Acceptable derivation of end time and start time of successive + records in multiples of sample time. + + + + + + Maximum number of segments per stream. If the limit is reached + no more segments are added to the database and the corresponding + extent is flagged as to fragmented. Use a negative value to + disable any limit. + + + + + + + scardac [OPTION]... + + + + + generic#help + generic#version + generic#config-file + generic#plugins + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#print-component + verbosity#print-context + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#trace + verbosity#log-file + + + + + + + diff --git a/etc/descriptions/scart.xml b/etc/descriptions/scart.xml new file mode 100644 index 0000000..1293b72 --- /dev/null +++ b/etc/descriptions/scart.xml @@ -0,0 +1,166 @@ + + + + + Import/export MiniSEED data to/from SDS archives. + + + + scart [options] {archive-dir} + + + The last option has to be the archive directory when dump mode is enabled. + When no archive directory is explicitly given, + $SEISCOMP_ROOT/var/lib/archive or the current directory + is used depending on whether $SEISCOMP_ROOT has been set or not. + The default operation mode is import. That means that a multiplexed + MiniSEED file or another record source such as ArcLink is used to import + records into a SDS structure. 
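The SDS layout quoted above for scardac (YEAR/NET/STA/CHA/NET.STA.LOC.CHA.YEAR.DATEOFYEAR, e.g. 2018/GE/APE/BHZ.D/GE.APE..BHZ.D.2018.125) is easy to reproduce. The sketch below builds such a path for the data type "D" used for waveform data; the helper name is illustrative.

    import datetime

    def sds_path(net, sta, loc, cha, date, dtype="D"):
        """Relative SDS path of the day file holding NET.STA.LOC.CHA on 'date'.

        >>> sds_path("GE", "APE", "", "BHZ", datetime.date(2018, 5, 5))
        '2018/GE/APE/BHZ.D/GE.APE..BHZ.D.2018.125'
        """
        doy = date.timetuple().tm_yday
        return "%04d/%s/%s/%s.%s/%s.%s.%s.%s.%s.%04d.%03d" % (
            date.year, net, sta, cha, dtype,
            net, sta, loc, cha, dtype, date.year, doy)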
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/etc/descriptions/scautoloc.xml b/etc/descriptions/scautoloc.xml new file mode 100644 index 0000000..27adcb7 --- /dev/null +++ b/etc/descriptions/scautoloc.xml @@ -0,0 +1,431 @@ + + + + Locates seismic events. + + + + Define parameters of the locator. Only LOCSAT is supported. + + + + The locator profile to use. + + + + + For each location, scautoloc performs checks to test if the + depth estimate is reliable. If the same location quality + (e.g. pick RMS) can be achieved while fixing the depth to + the default depth, the latter is used. This is most often + the case for shallow events with essentially no depth + resolution. + + + + + The locator might converge at a depth of 0 or even negative + depths. This is usually not desired, as 0 km might be + interpreted as indicative of e.g. a quarry blast or another + explosive source. In the case of "too shallow" locations the + minimum depth will be used. + + Note that the minimum depth can also be configured in scolv, + possibly to a different value. + + + + + + Control the buffer of objects. + + + + Time to keep picks in the buffer with respect to pick time, not creation time. + + + + + Time to keep origins in buffer. + + + + + Clean-up interval for removing old/unused objects. + + + + + + Define parameters for creating and reporting origins. + + + + Maximum travel-time RMS for a location to be reported. + + + + + Maximum travel-time residual (unweighted) for a pick at a station to be used. + + + + + Minimum number of phases for reporting origins. + + + + + Maximum permissible depth for reporting origins. + + + + + Maximum secondary azimuthal gap for an origin to be reported by. + The secondary gap is the maximum of the sum of 2 station gaps. + + Default: 360 degrees, i.e. no restriction based on this parameter. + + + + + Maximum epicntral distance to stations for accepting picks. + + + + + If the station count for stations at < 105 degrees distance + exceeds this number, no picks at > 105 degrees will be + used in location. They will be loosely associated, though. + + + + + If this string is non-empty, an amplitude obtained from an amplitude + object is used by ... . If this string is "mb", a period + obtained from the amplitude object is also used; if it has some other + value, then 1 [units?] is used. If this string is empty, then the amplitude + is set to 0.5 * thresholdXXL, and 1 [units?] is used for the period. + + + + + If this string is non-empty, it is used to obtain a pick SNR from an + amplitude object. If it is empty, the pick SNR is 10. + + + + + Location of the grid file for nucleating origins. + + + + + Location of the station configuration file for nucleating origins. + + + + + The station file to be used when in offline mode. + If no file is given the database is used. An example is given + in "@DATADIR@/scautoloc/station-locations.conf". + + + + + Receive and process manual phase picks. + + + + + Receive and process manual origins. Manual picks and arrival + weights will be adopted from the manual origin and the processing continues with these. + Origins produced this way by adding incoming automatic picks are nevertheless marked as + automatic origins. But they may contain manual picks (even pP and S picks). + + Add the LOCATION group to connection.subscriptions for receiving manual origins! 
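The secondary azimuthal gap used above is described as the maximum of the sum of two adjacent station gaps, i.e. the largest gap that remains when any single station is removed. A sketch of that computation from the station azimuths (degrees, as seen from the epicentre); returning 360 for fewer than three stations is an assumption of this sketch.

    def secondary_azimuthal_gap(azimuths_deg):
        """Maximum over the sums of two consecutive station gaps."""
        if len(azimuths_deg) < 3:
            return 360.0
        az = sorted(a % 360.0 for a in azimuths_deg)
        n = len(az)
        # gap following each station, wrapping around north
        gaps = [(az[(i + 1) % n] - az[i]) % 360.0 for i in range(n)]
        return max(gaps[i] + gaps[(i + 1) % n] for i in range(n))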
+ + This is an experimental feature relevant only for large regional and global networks, + where results by analysts can be expected before the end + of automatic event processing. + + + + + Adopt the depth from manual origins. Otherwise the default depth + in locator.defaultDepth is considered. + + + + + Pick processing may be enabled/disabled according to the + author ID of a pick. In addition, picks of certain authors + can be prioritized over other authors. + This is the author priority list that controls this behavior. + Its value is a comma-separated list of author ID's. + The earlier an author ID appears in the list the higher the + priority it gets. + + + + + Compare located origin with the origin at the depth given by + locator.defaultDepth. The origin with lower RMS is reported. + + + + + Parameter "a" in the equation t = aN + b. + t is the time interval between sending updates of an origin. + N is the arrival count of the origin. + + + + + Parameter "b" in the equation t = aN + b. + t is the time interval between sending updates of an origin. + N is the arrival count of the origin. + + + + + Activate for writing pick log files to "pickLog". + + + + + Location of pick log file containing information about received + picks. Activate "pickLogEnable" for writing the files. + + + + + + Create origins from XXL picks. These origins will receive the status "preliminary". + Use with care! Enabling XXL picks may result in frequent fake solutions. + + + + Picks with exceptionally large amplitudes may be flagged as XXL, + allowing (in future) faster, preliminary "heads-up" alerts. + + This option enables the feature. + + + + + Minimum amplitude for a pick to be flagged as XXL. The + value corresponds to the amplitude type configured in + autoloc.amplTypeAbs. NOTE that + BOTH minAmplitude and minSNR need to be exceeded! + + + + + Minimum SNR for a pick to be flagged as XXL. NOTE that + BOTH minAmplitude and minSNR need to be exceeded! + + + + + Minimum number of XXL picks for forming an origin. + Must be >= 4. + + + + + Maximum epicentral distance for accepting XXL picks. + + + + + Maximum depth for creating origins based on XXL arrivals. + + + + + + + + scautoloc [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/etc/descriptions/scautopick.xml b/etc/descriptions/scautopick.xml new file mode 100644 index 0000000..01e466e --- /dev/null +++ b/etc/descriptions/scautopick.xml @@ -0,0 +1,360 @@ + + + + Phase detection and picking on waveforms. + + + + Defined the record ringbuffer size in seconds. + + + + + The leadTime defines the time in seconds to start picking on + waveforms before current time. + + + + + If enabled, picks can be made on waveforms which are older than + current time - "leadTime". Current time is the time + when the module was started. 
This allows to pick + historic data in real-time playbacks which are preserving the + record times. See e.g. the "msrtsimul" module. + This option deactivates "leadTime". Activate only for playbacks. + + + + + The initTime defines a time span in seconds for that the picker + is blind after initialization. This time is needed to initialize + the filter and depends on it. + + + + + Interpolate gaps linearly? This is valid for gaps shorter + than thresholds.maxGapLength. + + + + + If enabled, all streams that are received by the picker are + used for picking. This option has only effect if a + file is used as input which contains more data than the + picker requests. If connected to a waveform server such as + SeedLink, the picker will only receive the data it is + subscribed to. + + + + + The default filter used for making detections. Station-specific + configurations (bindings) override this value. + + + + + The time correction applied for a pick. Station-specific + values (bindings) override this value. + + + + + The re-picker to use. By default only simple detections + are emitted as picks. To enable re-picking on a time window around + the detection, an algorithm (plugin) can be defined with this parameter. + + Currently available: "AIC", "BK" or + "GFZ". + + More options may be available by plugins. Configure related + parameters in global bindings. + + + + + Phase hint to be assigned to the pick made by the primary picker. + + + + + If enabled and "picker" is configured, then + initial detections are sent as well. To distinguish between + detections and picks the evaluation status of the pick is + set to "rejected". This is meant to be a debug + option which can be used to compare detections and picks by + their evaluation status. + + + + + The secondary picker to use, e.g., for picking S-phases. + Currently available is: "S-L2". More options may + be available by plugins. Configure related parameters + in global bindings. + + + + + If enabled, all secondary pickers that were triggered by + a previous pick will be terminated when a new detection or + pick has been found. This aims to avoid the case where an + S phase is wrongly picked as P but would also be picked as + S by the secondary picker. But suppressing the S pick can + lead to undesired results. It might be better in some + situations to have two picks (P and S) instead only a wrong P. + + + + + If enabled and "picker" or "spicker" is + configured, extra comments will be added to the resulting pick. + + Supported comments: + + SNR: added if SNR >= 0, comment id is "SNR" + + + + + Configures the feature extraction type to use. Currently + available: "DFX". Configure related parameters + in global bindings. + + When configured, the usability of the features for locating + depends on the used locator, e.g. LOCSAT. Read the + locator's documentation and configuration parameters. + + + + + The amplitude types to be computed by the picker based on + picks. + + + + + Threshold parameters for the primary picker. + + + + For which value on the filtered waveforms is a pick + detected. Station specific values override this value. + + + + + The value the filtered waveforms must reach to enable + detection again. Between triggerOn and triggerOff the + picker is blind and does not produce picks. Station + specific values override this value. + + + + + The maximum gap length in seconds to handle. + Gaps larger than this will cause the picker to be reset. 
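
The triggerOn/triggerOff behaviour described above is a simple hysteresis: a detection is declared when the filtered trace exceeds triggerOn, and the detector stays blind until the value falls below triggerOff again. The following toy sketch only illustrates that rule; the sample values and the detect() helper are assumptions, not scautopick's actual implementation.

    def detect(filtered, trigger_on=3.0, trigger_off=1.5):
        """Toy hysteresis detector returning the sample index of each detection."""
        armed = True
        picks = []
        for i, value in enumerate(filtered):
            if armed and value >= trigger_on:
                picks.append(i)   # detection; becomes a pick candidate
                armed = False     # blind until the trace drops below trigger_off
            elif not armed and value < trigger_off:
                armed = True      # re-arm the detector
        return picks

    # One detection at index 2, re-armed at index 5, next detection at index 6.
    print(detect([0.5, 1.0, 4.2, 3.8, 2.0, 1.0, 3.5, 0.8]))  # [2, 6]

In the module itself, station bindings may override the default trigger values per stream, and a gap longer than thresholds.maxGapLength resets the detector state entirely.
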
+ + + + + The time window used to compute a maximum (snr) amplitude + on the filtered waveforms. + + + + + The time used together with measured amplitude and + `thresholds.minAmplOffset` for scaling the amplitude below which + the picker is inactive after a P pick. Read the documentation! + + + + + The amplitude used together with measured amplitude and + `thresholds.deadTime` for scaling the amplitude below which + the picker is inactive after a P pick. The value is typically + similar to the trigger threshold. Read the documentation! + + + + + + + Configure a list of magnitude types. + Update and send amplitudes for these magnitudes as soon as data are + available. Do not wait for complete time windows. + Only magnitudes computed by scautopick as given by the amplitudes parameter are considered. + This option is for rapid magnitude estimation and EEW. + + WARNING: This option increases the load on the system! + + + + + + + Message group for sending amplitudes to. + + + + + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + + + + + records#record-driver-list + records#record-url + records#record-file + records#record-type + + + + + + + + + + + + + + + + + + + + + Configures a station for picking. A station without a binding assigned + will not be picked unless the picker is in offline mode. + + + + + Enables/disables picking on a station. + + + + + Defines the filter to be used for picking. + + + + + For which value on the filtered waveform is a pick detected. + + + + + The value the filtered waveform must reach to + enable a detection again. + + + + + The time correction applied to a pick. + + + + + Defines whether the detector applies sensitivity correction + (applying the gain) or not in advance to filter the data. + + + + + diff --git a/etc/descriptions/scbulletin.xml b/etc/descriptions/scbulletin.xml new file mode 100644 index 0000000..e946d97 --- /dev/null +++ b/etc/descriptions/scbulletin.xml @@ -0,0 +1,131 @@ + + + + + Create bulletins from SCML. + + + + scbulletin [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/etc/descriptions/scchkcfg.xml b/etc/descriptions/scchkcfg.xml new file mode 100644 index 0000000..a1e46cb --- /dev/null +++ b/etc/descriptions/scchkcfg.xml @@ -0,0 +1,11 @@ + + + + Checks a module configuration. 
+ + + scchkcfg {mod-name} [standalone] + + + + diff --git a/etc/descriptions/scdb.xml b/etc/descriptions/scdb.xml new file mode 100644 index 0000000..defca7f --- /dev/null +++ b/etc/descriptions/scdb.xml @@ -0,0 +1,115 @@ + + + + + Populate a SQL database from XML files or messages. + + + + + + Define the group on scmaster to subscribe for database + requests. + + + + + Define the group on scmaster to send database response + messages to. + + + + + + + Define the output database connection type. + + + + + Define the output database connection parameters. + + + + + + + scdb [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#config-db + + + + + + + + + + diff --git a/etc/descriptions/scdbstrip.xml b/etc/descriptions/scdbstrip.xml new file mode 100644 index 0000000..ccec99e --- /dev/null +++ b/etc/descriptions/scdbstrip.xml @@ -0,0 +1,138 @@ + + + + Clean up a database from event and waveform quality parameters. + + + + + + Delete all parameters after the specified time period, + not before. + + + + + Strip all event parameters including events, origins, + magnitudes, amplitudes, arrivals, picks, focal mechanisms. + + + + + Strip waveform quality control (QC) parameters. + + + + + Parameters controlling the time to keep objects in the database. + The time comparison considers the object time, not the time of + their creation. + + + + The number of days to preserve in the database. This + value is added to the whole timespan. Hours + and minutes are configured separately. + + + + + The number of hours to preserve in the database. This + value is added to the whole timespan. Days + and minutes are configured separately. + + + + + The number of minutes to preserve in the database. This + value is added to the whole timespan. Days + and hours are configured separately. + + + + + + + + + scdbstrip [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + + + + + + + + + + + + + diff --git a/etc/descriptions/scdispatch.xml b/etc/descriptions/scdispatch.xml new file mode 100644 index 0000000..be18d99 --- /dev/null +++ b/etc/descriptions/scdispatch.xml @@ -0,0 +1,108 @@ + + + + + Read objects (event, origin, etc) from a SCML file and sends the objects + to the messaging system. 
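
For the scdbstrip retention settings above, the days, hours and minutes values add up to a single time span measured back from the current time; only objects older than that cutoff are removed. A minimal sketch of the cutoff arithmetic with made-up values:

    from datetime import datetime, timedelta, timezone

    # Assumed example: keep 30 days, 12 hours and 0 minutes of data.
    keep = timedelta(days=30, hours=12, minutes=0)
    cutoff = datetime.now(timezone.utc) - keep
    print("objects dated before", cutoff.isoformat(), "would be stripped")
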
+ + + + scdispatch [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + + + + + + + diff --git a/etc/descriptions/scdumpcfg.xml b/etc/descriptions/scdumpcfg.xml new file mode 100644 index 0000000..4610773 --- /dev/null +++ b/etc/descriptions/scdumpcfg.xml @@ -0,0 +1,67 @@ + + + + + Dump bindings or module configurations used by a specific module or global + for particular stations. + + + + scdumpcfg [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#config-db + + + + + + + + + + + diff --git a/etc/descriptions/scesv.xml b/etc/descriptions/scesv.xml new file mode 100644 index 0000000..1cc90d0 --- /dev/null +++ b/etc/descriptions/scesv.xml @@ -0,0 +1,305 @@ + + + + Event summary view. + + + + Number of days to preload if scesv is started. + + + + + If enabled, the last automatic solution is displayed next to the + current solution in the summary panel. If the last automatic + solution differs from the current solution it is displayed + in red. If both solutions match it is displayed in gray. + + + + + If enabled, only the most recent event is shown even if an update of + an older event is being received. + + + + + If enabled, the map is centered around the most recent event event. + + + + + If enabled, an additional button is displayed which allows to + switch back the latest automatic solution. This button triggers + an command for scevent to prioritize automatic solutions until + the next manual solution is available. + + + + + A list of magnitude types to be displayed. + + + + + Label of button0 which triggers script0. + + + + + Label of button1 which triggers script1. + + + + + + Path to a script if button0 is clicked. + + + + + + If enabled, the current map is exported to file. + The filename is appened to the parameter list of script0. + The script has to take ownership of the file. + + + + + If enabled, the parameter list of script0 is event ID, + arrival count, magnitude, description. + + If disabled, the parameter list of script0 is event ID, + preferredOriginID, preferredMagnitudeID, + preferredFocalMechanismID. + + + + + + Path to a script if button1 is clicked. + + + + + + If enabled, the current map is exported to file. + The filename is appened to the parameter list of script1. + The script has to take ownership of the file. + + + + + If enabled, the parameter list of script1 is event ID, + arrivalCount, magnitude, description. 
+ + If disabled, the parameter list of script1 is event ID, + preferredOriginID, preferredMagnitudeID, + preferredFocalMechanismID. + + + + + + + + Draw borders in the summary panel. + + + + + + If the event type is either "other" or + "not existing" and this flag is true then the + event will not be shown. + + + + + Adjust content or display custom information in the Summary tab. + + + + Minimum longitude of initially displayed map region. + + + + + Maximum longitude of initially displayed map region. + + + + + Minimum latitude of initially displayed map region. + + + + + Maximum latitude of initially displayed map region. + + + + + Event information + + + + + ID of the event comment to be considered. + + + + + Value to be shown in case no valid event comment is + found. + + + + + Label of the value to be shown. + + + + + + + Origin information + + + + Display origin comments. + + + + ID of the origin comment to be considered. + + + + + Value to be shown in case no valid origin comment is + found. + + + + + Label of the value to be shown. + + + + + + + + Display information related to a point of interest (POI) + read from the cities XML file. + + + + Maximum distance in degrees of a POI to be taken into account. + + + + + The minimum population of a POI to be taken into account. + + + + + Message conversion string that converts a POI into the text + displayed under the region label. There are different + placeholders that can be used: @dist@, @dir@, @poi@ and @region@. + + + + + + + scesv [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + generic#print-config-vars + generic#validate-schema-params + generic#dump-settings + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + verbosity#print-context + verbosity#print-component + verbosity#log-utc + verbosity#trace + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + records#record-driver-list + records#record-url + records#record-file + records#record-type + + + + cities#city-xml + + + + gui#full-screen + gui#non-interactive + + + + + + + + + + diff --git a/etc/descriptions/scevent.xml b/etc/descriptions/scevent.xml new file mode 100644 index 0000000..19bb039 --- /dev/null +++ b/etc/descriptions/scevent.xml @@ -0,0 +1,431 @@ + + + + + Associates an Origin to an Event or forms a new Event if no match is found. + Selects the preferred origin, magnitude and focal mechanism. + + + Prefix for all Event IDs + + + + Defines the pattern to generate an event ID. + + %p : prefix + + %Y : year + + %[w]c: alpha character + + %[w]C: upper case alpha character + + %[w]d: decimal + + %[w]x: hexadecimal + + %[w]X: upper case hexadecimal + + [w] is an optional width parameter. + + + + + Configures the number of event ID slots to look back and + forth when an event ID is already taken. The default in + previous versions was 5. Now -1 means that the margin is + determined automatically based on "eventAssociation.eventTimeBefore" + and "eventAssociation.eventTimeAfter". 
According to the + configured "eventIDPattern" a fixed time range per slot + can be computed and with that width the number of look + ahead slots and look back slots can be computed based on + the given time ranges for event association. + + + + + If enabled then the EventDescription with type + 'Flinn-Engdahl region' will be populated with the + Flinn-Engdahl region name. + + + + + + + Defines a blacklist of event ids. The items of this list + are only matches against %c, %C, %d, %x and %X of the + eventIDPattern description. Year (%Y) and prefix (%p) are + not matched. The match is case-sensitive, so blacklisting + e.g. abcd would only match in combination with %c. If %C + is used ABCD has to be blacklisted. + + + + + + + Criteria defining if Origins are associated to an event + and which Origins and magnitudes become preferred. + + + + + Minimum number of Picks for an Origin that is automatic and cannot be + associated with an Event to be allowed to form an new Event. + + + + + + Minimum score of an automatic Origin to be allowed to + form an new Event. This requires an activated score + plugin and a score processor. Configure "score" + for defining the score processor and the score processor + parameters. If minimumScore is defined, "minimumDefiningPhases" + has no effect on association as this phase check will be + superseded by the score check. It is the task of the score + processor to evaluate a proper score for all input Origins. + + + + + + Ignore and do not associate Origins derived + from CMT/MT inversions. + + + + + + Time range before the Origin time of an incoming Origin to search for + matching events. + + + + + + Time range after the Origin time of an incoming Origin to search for + matching events. + + + + + + Minimum number of matching picks between two Origins to be associated + to the same event. + + + + + + Negative time window: compare only pickIDs to find + matching arrivals. A non negative + value (including 0) compares pick times regardless + of the pickID. Pass: |pick1.time - pick2.time| <= threshold + + + + + + This parameter is only used in conjunction with + eventAssociation.maximumMatchingArrivalTimeDiff. + If a station has multiple associated arrivals for a + particular event, this flag defines if the time distance + of a new pick to all arrivals must be within + eventAssociation.maximumMatchingArrivalTimeDiff + or if one matching arrival is enough. + + + + + + Allows to match picks that are associated with weight 0. + + + + + + Associates an Origin with an existing event if the Origin + time differs not more than 60 seconds unless the + minimumMatchingArrivals criteria matches. + + + + + + Allowed location difference between an incoming Origin compared with + preferred Origins to get associated. + + + + + + Magnitude type priority list for becoming a preferred magnitude for an + event. + + Example: + + M, mBc, Mw(mB), Mwp, ML, MLh, MLv, mb + + + + + + If true, one magnitude will be preferred even if magnitude criteria are + not fullfilled. + + + + + + Minimum number of station magnitudes referenced to a network magnitude + to become a preferred magnitude. + + + + + + Minimum number of station magnitudes required for Mw(mB) to be considered as + preferred magnitude. + + + + + + Minimum number of station magnitudes which ensures that Mw(mB) will be + preferred and not mb. + + + + + + Average between mb and Mw(mB) which must be exceeded to become Mw(mB) + preferred. 
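
The magnitude preference rules above combine a type priority list with a minimum number of contributing station magnitudes. The sketch below shows that basic selection only; the priority list, the counts and the pick_preferred() helper are illustrative assumptions, and the additional Mw(mB)-versus-mb rules described above are deliberately left out.

    def pick_preferred(magnitudes, priority, min_station_count=4):
        """Pick a preferred magnitude: honour the type priority list, but only
        accept candidates backed by enough station magnitudes."""
        candidates = [m for m in magnitudes if m["stationCount"] >= min_station_count]
        if not candidates:
            return None
        # Lower index in the priority list = higher priority; unknown types rank last.
        def rank(m):
            return priority.index(m["type"]) if m["type"] in priority else len(priority)
        return min(candidates, key=rank)

    mags = [
        {"type": "mb",  "value": 5.1, "stationCount": 12},
        {"type": "MLv", "value": 5.3, "stationCount": 6},
        {"type": "Mwp", "value": 5.4, "stationCount": 3},   # too few station magnitudes
    ]
    print(pick_preferred(mags, ["Mwp", "ML", "MLv", "mb"]))  # -> the MLv entry
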
+ + + + + + If false then the station count rules out the magnitude priority + which is only taken into account if two magnitudes have the + same station count. + + If true then the priority rules out the station count + which is only taken into account if two magnitudes have the + same priority. + + + + + + The general priority list to decide if an Origin becomes preferred. + The priority decreases in the order of the parameters. + This list is not used unless this parameter is activated. + + Empty priority list: scevent replicates the default hard wired behaviour: + AGENCY, STATUS, PHASES_AUTOMATIC, TIME_AUTOMATIC + + Each item in the list corresponds to a check that is performed. + Each check computes a score of the incoming Origin (s1) and the + current preferred Origin (s2). If the s1 is lower than s2, + the incoming Origin is rejected and does not become preferred. + All subsequent checks are ignored. + If s1 is equal to s2, the next check in the list is performed. + If s1 is larger than s2, the Origin becomes preferred and + all subsequent checks are ignored. + + Available tokens: + + AGENCY: check based on agency priorities + + AUTHOR: check based on author priorities + + MODE: evaluation mode priority: 0 = unset, 1 = automatic, 2 = manual, manual over-rules automatic + + STATUS: priority combined from evaluation status and evaluation mode: + -100 = status is rejected, -1 = status is reported, + 0 = status is preliminary or status is unset and mode is automatic, + 1 = status is confirmed or status is unset and mode is manual, + 2 = status is reviewed, 3 = status is final, + + METHOD: check based on the method priorities + + PHASES: higher phase count = higher priority + + PHASES_AUTOMATIC: only checks phase priorities for incoming automatic Origins + + RMS: lower rms = higher priority + + RMS_AUTOMATIC: only check RMS on incoming automatic Origins + + TIME: more recent Origins (creationTime) have higher priorities + + TIME_AUTOMATIC: only check creationTime priority on incoming automatic Origins + + SCORE: evaluates the score according to a configured ScoreProcessor and + prefers the Origin/Focalmechanism with the highest score. + + + + + + The agencyID priority list. When the eventtool comes to the point to + select a preferred Origin based on AGENCY it orders all Origins by its agency priority and + selects then the best one among the highest priority agency. It also defines the + agency priority for custom priority checks + (eventAssociation.priorities). + + The parameter is only considered when defined in "priorities". + + + + + + The author priority list. When the eventtool comes to the point to + select a preferred Origin based on AUTHOR it orders all Origins by its author priority and + selects then the best one among the highest priority author. It also defines the + author priority for custom priority checks (eventAssociation.priorities). + + The parameter is only considered when defined in "priorities". + + + + + + The method priority list. When the eventtool comes to the point to + select a preferred Origin based on METHOD it orders all Origins by its methodID priority and + selects then the best one among the highest priority method. It also defines the + method priority for custom priority checks (eventAssociation.priorities). + A defined method string must match exactly the string in Origin.methodID. + + The parameter is only considered when defined in "priorities". + + + + + + Defines the ScoreProcessor interface to be used along + with priority "SCORE". 
+ + The parameter is only considered when defined in "priorities". + + + + + + If the preferred Origin has evaluation status 'rejected', the + Event type will be set to 'not existing' unless the Event + type has been fixed by an operator or the preferred Origin + has been fixed. + + + + + + Configures a timespan to delay Event creation. If a new Origin arrives + which cannot be associated to an existing Event, delay the Event creation for a certain + timespan. + + + + + + Region filter for creating events. Use with care! Origins + outside may be ignored even if they would + become preferred otherwise. + + + + Region by geographic coordinates. + + Format: "South, East, North, West" + + + + + + Minimum depth. + + + + + + Maximum depth. + + + + + + + The delayFilter group configures an Origin filter to activate the delay feature for + this Origin. If more than one filter is given they are combined with AND. + + + The agencyID of the Origin to be delayed. + + + The author of the Origin to be delayed. + + + + The evaluation mode of the Origin to be delayed. Can be either "manual" + or "automatic". + + + + + + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#encoding + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + diff --git a/etc/descriptions/scevent_regioncheck.xml b/etc/descriptions/scevent_regioncheck.xml new file mode 100644 index 0000000..a24a27f --- /dev/null +++ b/etc/descriptions/scevent_regioncheck.xml @@ -0,0 +1,100 @@ + + + + scevent + evrc plugin for scevent + + + + Test if events lie within or outside a region. + Events within a region are flagged as positive, outside as negative. + The event type is set accordingly. Add the + plugin "evrc" to the plugins parameter in the + order of priority to make this feature available. Read the + documentation of the RegionCheck for more details. + + + + Allow setting the event type. + The type of events which have manual origins will + not be changed unless configured explicitely by + "overwriteManual". + + + + + Allow overwriting existing event types. Disabling does not + allow accounting for changes in source region. + + + + + Allow setting the event type if the mode of the preferred + origin is manual or if the event type was set manually. + + + + + The list of closed BNA polygon names defining regions for + flagging event as positive or negative. + A polygon name defines a positive region but names with prefix ! (exclamation mark) + define negative regions. Evaluation is done in the order of the + polygons. The last matching criteria applies and the event type + is set accordingly. + + Default: If events are not positive or are negative regions the + event type is set to "outside of network interest". + Default: "!reject", use "accecpt" to overwrite the default. 
+ + Examples: + + Events are flagged positive within the polygon "germany": + + germany + + All events are flagged positive but events within the polygon "quarries" are negative: + + accept,!quarries + + Events within the polygon "germany" are flagged positive but + all other events and events within the polygon "quarries" are negaitve: + + germany,!quarries + + All events are flagged positive but events within the polygon "germany" + are negative and all events within the polygon "saxony" are positive: + + accept,!germany,saxony + + + + + Read the event type, minDepth and maxDepth from the BNA polygon header. The header may contain + the values, e.g. header of a polygon with name "quarry": + + "quarry","rank 1","eventType: quarry blast, minDepth: -5, maxDepth: 10",13 + + When eventType is set, it supersedes eventTypePositive and eventTypeNegative. + When not set, eventTypePositive and eventTypeNegative are considered. + + + + + New type of an event which is flagged positive. Ignored + if readEventTypeFromBNA is active. + + Empty: Do not set type + + + + + New type of an event which is flagged negative. Ignored + if readEventTypeFromBNA is active. + + Empty means default: "outside of network interest" + + + + + + diff --git a/etc/descriptions/scevtlog.xml b/etc/descriptions/scevtlog.xml new file mode 100644 index 0000000..18ac721 --- /dev/null +++ b/etc/descriptions/scevtlog.xml @@ -0,0 +1,79 @@ + + + + Event log preserving the history of updates. + + + + Specify the output directory. Within this directory the logging + directory structure and the event files are stored. + + + + + Specify output event format (default is autoloc3). For completeness + it is recommended to switch to xml as storage format. The autoloc3 + format can be easily reconstructed with scbulletin but not the other + way around. + + + + + If format is xml then all XML files will be compressed with gzip + and stored with file extension ".xml.gz". They are also + valid gzip files and can be used as input to e.g. zgrep. + + + + + + scevtlog [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + diff --git a/etc/descriptions/scevtls.xml b/etc/descriptions/scevtls.xml new file mode 100644 index 0000000..a03739a --- /dev/null +++ b/etc/descriptions/scevtls.xml @@ -0,0 +1,76 @@ + + + + List event IDs from database. 
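
Returning to the evrc plugin described above: the region list is evaluated in order, "!"-prefixed names act as negative regions, and the last matching entry decides the flag. The toy evaluation below reproduces the "accept,!germany,saxony" example; the inside set is a stand-in for the real point-in-polygon test against the configured BNA files.

    def flag_event(region_list, inside):
        """Return True (positive) or False (negative) for an event.

        region_list: e.g. ["accept", "!germany", "saxony"]
        inside:      names of the polygons that contain the event;
                     "accept"/"reject" are treated as matching everything.
        """
        flag = False                    # default: not positive
        for name in region_list:
            negative = name.startswith("!")
            polygon = name.lstrip("!")
            if polygon in ("accept", "reject") or polygon in inside:
                flag = not negative     # the last matching entry wins
        return flag

    regions = ["accept", "!germany", "saxony"]
    print(flag_event(regions, {"germany"}))            # False: negated by !germany
    print(flag_event(regions, {"germany", "saxony"}))  # True: the later saxony match wins
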
+ + + scevtls [options] + + + generic#help + generic#version + generic#config-file + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + + + + + + + + + + diff --git a/etc/descriptions/scevtstreams.xml b/etc/descriptions/scevtstreams.xml new file mode 100644 index 0000000..2f9eed9 --- /dev/null +++ b/etc/descriptions/scevtstreams.xml @@ -0,0 +1,127 @@ + + + + + Extract stream information and time windows from an event. + + + + scevtstreams [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + + + + + + + + + + + + + + + + + diff --git a/etc/descriptions/scheli.xml b/etc/descriptions/scheli.xml new file mode 100644 index 0000000..ea2529d --- /dev/null +++ b/etc/descriptions/scheli.xml @@ -0,0 +1,323 @@ + + + + Real-time helicorder view for one stream. + + + + + List of stream codes to be plotted (net.sta.loc.cha). + If not in capture mode only the first stream is shown. + When using a list, the first entry is considered. + Use commas for separating streams. + + Example: GR.MOX..BHZ + + + + Filter to be applied on the data. + + + Filter to be applied on the data. + + + Length of data per trace. + + + + The time format used to print the start and end time of the + whole plot (upper right corner). The format specification is + the one used in the strftime function (man strftime). + + + + + Set current time to last data sample. + + + + Line width of traces. + + + + A list of alternating row colors cycled through for painting + traces. + + + + + Use anti aliasing to plot the traces. The default uses the + settings from scheme.records.antiAliasing + + + + + + Add stream description to traces. + + + + + + Gain-corrected amplitudes given in units of the sensor. + For example: m/s. + + + + Define the method to scale traces within rows. Possible + values are: + + minmax: Scale all rows to configured minimum and maximum + amplitudes configured by amplitudeRange.min and + amplitudeRange.max + + row: Scale each row to the maximum within this row. + + + + + Minimum amplitude to show in trace. Requires + amplitudeRange.scale = "minmax". + + + + + Minimum amplitude to show in trace. Requires + amplitudeRange.scale = "minmax". + + + + + + Control dumping of PNG images. + Execute "scheli capture" for image generation in + the background without the graphics. + + + + Image creation interval. Negative values disable image + dumping. + If enabled, images are generated at the configured + interval. + + + + Name of output file. + The filename can contain placeholders + that are replaced by the corresponding streamID parts: + + %N : network code + + %S : station code + + %L : location code + + %C : channel code + + Placeholders are important if more than one stream + is given and capture mode is active. + + + + Image resolution. + + + Number of pixels horizontally. + + + Number of pixels vertically. 
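
The output file name placeholders described above for scheli (%N, %S, %L, %C) are filled from the parts of the stream ID, which is what keeps captured images apart when several streams are configured. A small sketch of that substitution; the template path is an assumption.

    def expand_output_file(template, stream_id):
        """Replace %N %S %L %C in the template with the net.sta.loc.cha parts."""
        net, sta, loc, cha = stream_id.split(".")
        for key, value in (("%N", net), ("%S", sta), ("%L", loc), ("%C", cha)):
            template = template.replace(key, value)
        return template

    # For the stream GR.MOX..BHZ used as example above:
    print(expand_output_file("/tmp/heli_%N_%S_%C.png", "GR.MOX..BHZ"))
    # -> /tmp/heli_GR_MOX_BHZ.png
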
+ + + + + + + Define the path to a script that is called whenever an image + has been captured and written to disc. The only parameter is + the path to the generated image. + + + + + + + scheli [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + generic#print-config-vars + generic#validate-schema-params + generic#dump-settings + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + verbosity#print-context + verbosity#print-component + verbosity#log-utc + verbosity#trace + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#config-db + + + + records#record-driver-list + records#record-url + records#record-file + records#record-type + + + + gui#full-screen + gui#non-interactive + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/etc/descriptions/scimex.xml b/etc/descriptions/scimex.xml new file mode 100644 index 0000000..8f40597 --- /dev/null +++ b/etc/descriptions/scimex.xml @@ -0,0 +1,173 @@ + + + + SeisComP event exchange between two systems. + + + + Mode of operation. Options are IMPORT or EXPORT. + + + + Cache lifetime for objects. + + + + Only used in export mode. A list of message groups to subscribe. + + + + + Used only in import mode. It defines the source format of the + messages that need to be converted. Currently the import of + SeisComP datamodel version 0.51 (imexscdm0.51) + is supported which was used in release Barcelona (2008). + + + + + A list of hosts profiles to be considered for exporting. + These are used in hosts.$name directives + (see below) to define addresses, filter criteria, etc. + applicable to each recipient. + + + + + + A list of hosts profiles to be considered for importing. + These are used with hosts.$name directives similarly to + exportHosts. + + + + + A definition of an event filter. + + + Pair of doubles that defines the latitude range. + Example: -90:90. + + + + + Pair of doubles that defines the longitude range. + Example: -180:180. + + + + + Pair of doubles that defines the magnitude range. + Example: 3:10. + + + + + Number of minimum arrivals. + + + + + White list of AgencyIDs (the agency identifier which + appears in the objects sent over the export-import link). + + + + + + + + A sink definition used for either import or export. + + + + Address of a sink, as a host name with an optional port + number e.g. 'address = 192.168.1.1' or + 'address = somewhere.com:4803' + + + + + Defining filter criteria name for sink, e.g. criteria = + world-xxl. The criteria must be defined in the criteria. + * configuration lines. + + + + + Enable/disable filtering based on defined criteria. + If set to false, all events will pass, even if one + or more criteria are defined. + + + + + Optional target format for export. + + + + + Enable/disable defined routing tables. + + + + + Defining routing tables in the meaning of mapping + objects to message groups. Example: Pick:NULL, + StationAmplitude:NULL, Origin:LOCATION, + StationMagnitude: MAGNITUDE, NetworkMagnitude:MAGNITUDE, + MagnitudeReference:MAGNITUDE, OriginReference:EVENT, + Event:EVENT. 
Specifying NULL for the message group causes + messages to be thrown away/dropped/discarded. + + + + + + + + scimex [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + + + + + diff --git a/etc/descriptions/scimport.xml b/etc/descriptions/scimport.xml new file mode 100644 index 0000000..647d019 --- /dev/null +++ b/etc/descriptions/scimport.xml @@ -0,0 +1,250 @@ + + + + Forward messages across two SeisComP systems. + + + + URI of receiving host which runs scmaster. The URI contains + the host name with an optional protocol and port. + + Format: protocol://host:port + + Examples: + + - proc + + - scmp://proc:18180 + + + + + This option has to be set if the application runs in import mode. + The routing table has to be defined in the form of + source_group:sink_group + + + + + Define a list of message groups of the source system + (connection.server). If not specified, the source system is + queried for a list of message groups which are then used to check + each subscription extracted from the routing table. + This parameter allows to override to source query result. + + + + Enable/Disable filtering of messages + + + Define filter criteria before sending. + + Criteria for filtering picks. + + + The mode of picks to filter for. Allowed values: + "automatic" and "manual". + + + + + The status of picks to filter for. Allowed values: + "preliminary", "confirmed", + "reviewed", "final", + "rejected" and "reported". + + + + + The pick phase hint to filter for. Allowed values: + all possible phase codes. + + + + + The pick agencyIDs to filter for. Allowed values: + all possible agency ids; + + + + + The pick network code of the processed waveforms. + Allowed values: all possible network codes. + + + + + Criteria for filtering amplitudes + + + The amplitude comparison operator. Allowed values: + "eq", "lt", "gt" and "*". + + + + + The amplitude threshold to filter for. The operator + configured with "operator" is used to compare this threshold with + the incoming value. If "operator" is "*" then + values will pass. + + + + + The amplitude agencyIDs to filter for. Allowed values: + all possible agency ids. + + + + + Criteria for filtering origins + + + The latitude range in format [min]:[max]. + + + + + The longitude range in format [min]:[max]. + + + + + The depth range in format [min]:[max]. + + + + + The origin agencyIDs to filter for. Allowed values: + all possible agency ids; + + + + + The origin evaluation mode to filter for. Allowed values: + "automatic" and "manual". + + + + + The origin status to filter for. Allowed values: + "preliminary", "confirmed", + "reviewed", "final", + "rejected" and "reported". + + + + + The minimum number of arrivals of an origin to pass + the filter. + + + + + Criteria for filtering events + + + The event type to filter for, e.g. "earthquake", + "explosion" ... + + + + + Criteria for filtering station magnitudes + + + The station magnitude type. Allowed values: all possible + magnitude types such as "MLv". 
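
The amplitude filter criteria above pair a comparison operator ("eq", "lt", "gt" or "*") with a threshold. A minimal sketch of how such a comparison behaves; the passes() helper and the sample values are assumptions, not scimport code.

    import operator

    OPS = {"eq": operator.eq, "lt": operator.lt, "gt": operator.gt}

    def passes(value, op, threshold):
        """Apply the configured comparison; '*' lets every value pass."""
        return True if op == "*" else OPS[op](value, threshold)

    print(passes(120.0, "gt", 100.0))  # True: amplitude exceeds the threshold
    print(passes(120.0, "lt", 100.0))  # False
    print(passes(120.0, "*", 0.0))     # True: wildcard operator passes everything
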
+ + + + + Criteria for filtering network magnitudes + + + The magnitude type. Allowed values: all possible + magnitude types such as "MLv". + + + + + Criteria for filtering QC parameters + + + The QC parameter type. Allowed values: all possible + types such as "latency", "delay" ... + + + + + + + + scimport [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + + + + + + + + + diff --git a/etc/descriptions/scinv.xml b/etc/descriptions/scinv.xml new file mode 100644 index 0000000..3595c74 --- /dev/null +++ b/etc/descriptions/scinv.xml @@ -0,0 +1,210 @@ + + + + Inventory database synchronisation. + + + scinv command [options] [files] + + + Command is one of: sync, merge, apply, keys, ls and check. + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + generic#print-config-vars + generic#validate-schema-params + generic#dump-settings + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + verbosity#print-context + verbosity#print-component + verbosity#log-utc + verbosity#trace + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Synchronise key files. + + + + + Delete key files if a station does not exist in inventory. + + + + + Quantities probed when using the check command. + + + + Maximum allowed distance between station and sensor location + coordinates. + + + + + Maximum allowed differences between elevation of station and + sensor location. + + + + + Maximum allowed depth of channel (sensor). This is the depth + of the sensor below the surface. + + + + + + diff --git a/etc/descriptions/scm.xml b/etc/descriptions/scm.xml new file mode 100644 index 0000000..9942736 --- /dev/null +++ b/etc/descriptions/scm.xml @@ -0,0 +1,64 @@ + + + + Process monitor. + + + + + scm [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + + + + + + + diff --git a/etc/descriptions/scmag.xml b/etc/descriptions/scmag.xml new file mode 100644 index 0000000..b600d49 --- /dev/null +++ b/etc/descriptions/scmag.xml @@ -0,0 +1,201 @@ + + + + Calculates magnitudes of different types. 
+ + + + The magnitude types to be calculated. Station magnitudes are + computed from their amplitudes, network magnitudes from their + station magnitudes. + + + + + General parameters for computing magnitudes. Others are configured + by global binding parameters for specific magnitude types. + + + + The methods for computing the network magnitude + from station magnitudes. Exactly one method per + magnitude can be configured. + To define the averaging method per magnitude type append + the type after colon, e.g.: + "magnitudes.average = default, MLv:median" + + default: Compute the mean if less than 4 contributed + station magnitudes exist. Otherwise apply a trimmed mean + of 25%. + + Options are "default", "mean", + "median", "trimmedMean" and + "medianTrimmedMean". + + + + + + + Interval between 2 sending processes. The interval controls + how often information is updated. + + + + + The minimum weight of an arrival for an associated amplitude + to be used for calculating a magnitude. + + + + + + The summary magnitude is the weighted average from all + defined network magnitude types: Single network magnitude values + are multiplied with their magnitude-type specific weight and + summed up. The resulting sum is divided by the sum of all weights. + + + Enables summary magnitude calculation. + + + Define the type/name of the summary magnitude. + + + + This is the minimum station magnitude required for any + magnitude to contribute to the summary magnitude at all. If + this is set to 4, then no magnitude with less than 4 station + magnitudes is taken into consideration even if this results + in no summary magnitude at all. For this reason, the default + here is 1 but in a purely automatic system it should be + higher, at least 4 is recommended. + + + + + Define the magnitude types to be excluded from the summary + magnitude calculation. + + + + + Define the magnitude types to be included in the summary + magnitude calculation. + + + + + The coefficients defining the weight of network magnitudes + for calculating the summary magnitude. + Weight = a * magnitudeStationCount + b. + + + + Define the coefficients a. To define the value per magnitude + type append the type after colon. A value without a + type defines the default value. + + + + + Define the coefficients b. To define the value per magnitude + type append the type after colon. A value without a + type defines the default value. + + + + + + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + + + + + + + diff --git a/etc/descriptions/scmaster.xml b/etc/descriptions/scmaster.xml new file mode 100644 index 0000000..cc6ca4f --- /dev/null +++ b/etc/descriptions/scmaster.xml @@ -0,0 +1,516 @@ + + + + The messaging system + + + + The default set of message groups for each queue. Only used + if a queues group list is unset (note: empty is not unset). + + + + + Enable messaging queues defined as profile in queues. 
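
The summary magnitude described above for scmag is a weighted average of the network magnitudes, with Weight = a * magnitudeStationCount + b evaluated per type. A worked sketch with assumed coefficients and values (the real defaults and any per-type overrides come from the configuration):

    def summary_magnitude(network_mags, a=0.25, b=1.0):
        """Weighted average with weight = a * stationCount + b per network magnitude."""
        weighted_sum = 0.0
        weight_sum = 0.0
        for mag in network_mags:
            weight = a * mag["stationCount"] + b
            weighted_sum += weight * mag["value"]
            weight_sum += weight
        return weighted_sum / weight_sum

    mags = [
        {"type": "MLv", "value": 4.2, "stationCount": 8},  # weight 0.25*8 + 1 = 3.0
        {"type": "mb",  "value": 4.6, "stationCount": 4},  # weight 0.25*4 + 1 = 2.0
    ]
    print(round(summary_magnitude(mags), 2))  # (3.0*4.2 + 2.0*4.6) / 5.0 = 4.36
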
The profile + names are the final queue names. + + + + + Control the messaging interface. The default protocol is + "scmp" but "scmps" (secure protocol) is + used when valid SSL certificate and key are configured. + + + + Local bind address and port of the messaging system. + 0.0.0.0:18180 accepts connections from all clients, + 127.0.0.1:18180 only from localhost. + + + + + The IP access control list for clients which are allowed + to connect to the interface. + + + + + SO_REUSEADDR socket option for the TCP listening socket. + + + + + SSL encryption is used if key and certificate are configured. + + + + Additional local bind address and port of the messaging + system in case SSL encryption is active. + + + + + The IP access control list for clients which are allowed + to connect to the interface. + + + + + SO_REUSEADDR socket option for the TCP listening socket. + + + + + + + + + + + + + + + + + Set the parameters for each messaging queue. The queues are used + when listed in the "queues" parameter. Several queues + can be used in parallel. For queues with without databases leave + the processor parameters empty. + + + + + Define the list of message groups added to the queue. + If unset, then the defaultGroups will be used. + A queue will always add the default group "STATUS_GROUP". + This parameter overrides defaultGroups. + + + + + The IP access control list for clients which are allowed + to join the queue. + + + + + The maximum size in bytes of a message to be accepted. + Clients which send larger messages will be disconnected. + The default is 1MB. + + + + + List of plugins required by this queue. This is just a + convenience parameter to improve configurations + readability. The plugins can also be added to the + global list of module plugins. + + Example: dbstore + + + + + + + Interface name. For now, use "dbstore"to + use a database. + + Use empty for testing or playbacks without a database. + + + + + + Define the database connection parameters. + + + + Selected the database driver to use. + Database drivers are available through plugins. + The default plugin is dbmysql which supports + the MYSQL database server. It is activated + with the core.plugins parameter. + + + + + Set the database read connection which is + reported to clients that connect to this server. + If a remote setup should be implemented, + ensure that the hostname is reachable from + the remote computer. + + + + + Set the database write connection which is + private to scmaster. + A separate write connection enables different + permissions on the database level for scmaster + and clients. + + + + + If enabled, the plugin will check the database + schema version and refuse to start if the + version doesn't match the latest version. + If disabled and the an object needs to be + stored, which is incompatible with the + database schema, this object is lost. + Leave this option enabled unless you know + exactly what are you doing and what the + consequences are. + + + + + + + + + + + The directory served by the http server at staticPath. + + + + + The URL path at which html files and assets are available. + All files under filebase will be served at this URL path. + + + + + The URL path at which the broker websocket is available. + + + + + + + + Enable database storage. + + Enable/disable the database. + If the database is not enabled, the processed data is not + stored persistently. + Leave this option enabled unless you know what you are doing. 
+ + + + + + + + + + + + + + + + scmaster [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#print-config-vars + generic#validate-schema-params + generic#dump-settings + generic#daemon + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + verbosity#print-context + verbosity#print-component + verbosity#log-utc + verbosity#trace + + + + + + + + + diff --git a/etc/descriptions/scmssort.xml b/etc/descriptions/scmssort.xml new file mode 100644 index 0000000..9149a39 --- /dev/null +++ b/etc/descriptions/scmssort.xml @@ -0,0 +1,64 @@ + + + + + Read and manipulate miniSEED records + + + + + scmssort [options] input + + + + + + + + + + + + + + + diff --git a/etc/descriptions/scmv.xml b/etc/descriptions/scmv.xml new file mode 100644 index 0000000..8307070 --- /dev/null +++ b/etc/descriptions/scmv.xml @@ -0,0 +1,200 @@ + + + + Map view + + + + Start scmv in one of the available display modes: + + groundmotion or qualitycontrol + + and without tabs and menus as walldisplay. + + + + + Set the maximum latency in seconds of the ground motion records + to be valid. + + + + + Set the time span in seconds to keep events. + + + + + Set the time span in seconds to read events initially from + database. + + + + + If a new origin/event is set/selected this option defines if the + map is centered or not at the origin location. + + + + + Time span of which an event is active after origin time to + show blinking associated stations. + + + + + A positive value (greater than zero) defines the interval to check + for expired events. A negative or zero value disables the interval + check and expired events are only removed when a new event is declared + or an existing event is updated. + + + + + Enable/disable drawing of station annotations at startup. + + + + + Enable/disable drawing of station annotations with + location/channel codes. + + + + + Set the location of the map symbol legend (QC, ground motion). + Use either: topleft, topright, bottomright or bottomleft. + + + + + Set the location of the event symbol legend. Use either: + topleft, topright, bottomright or bottomleft. + + + + + + Whether to show the event table initially or not. + + + + + The columns that are visible in the table. If nothing + is specified then all columns are visible. Valid column names are: + "Event", "Origin Time", "Magnitude", + "Magnitude Type", "Region", "Latitude", + "Longitude", "Depth". + + + + + + + The filter applied to waveforms for measuring ground motion. + + + + + + Allow to define an initial rectangular region for the map. + + + + Minimum latitude in degrees. + + + + Minimum longitude in degrees. + + + Maximum latitude in degrees. + + + Maximum longitude in degrees. 
+ + + + + + scmv [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + generic#print-config-vars + generic#validate-schema-params + generic#dump-settings + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + verbosity#print-context + verbosity#print-component + verbosity#log-utc + verbosity#trace + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + records#record-driver-list + records#record-url + records#record-file + records#record-type + + + + cities#city-xml + + + + gui#full-screen + gui#non-interactive + + + + + + + + + diff --git a/etc/descriptions/scolv.xml b/etc/descriptions/scolv.xml new file mode 100644 index 0000000..7999c8e --- /dev/null +++ b/etc/descriptions/scolv.xml @@ -0,0 +1,752 @@ + + + + Origin locator view + + + + Number of days to preload events from the database when staring scolv. + + + + + List of magnitude types to be calculated when + computing magnitudes. This list can be altered interactively so + this reflects only the default unless computeMagnitudesSilently + is set to true. + + + + + A list of magnitude types to be displayed in the summary widget (F8). + + + + + Label of the button that triggers the script defined in + "scripts.script0". + + + + + Label of the button that triggers the script defined in + "scripts.script1". + + + + + Define scripts linked to extra buttons. A button is created + by definition of the corresponding script. + + + + Full path to the script which enables a publish button in the + summary panel on the left side of scolv (activated with F8). + + + + + Full path to the script executed when hitting the button labeled with "button0". + OriginID and EventID are passed to the script as arguments. + + + + + Full path to the script executed when hitting the button labeled with "button1". + OriginID and EventID are passed to the script as arguments. + + + + + + If the publish button is clicked while another script is + still running, a message box pops up with a warning that + a script is still running and to option to terminate it. If + this flag is true, then it will be terminated automatically + without user interaction and the warning message. + + + + + + + + Sets the default event type to be used when confirming a solution. If + not set, the event type is not changed at all unless "with options" + is used. The origin or its source must be associated with an event already. + + + + + A list of event types which are commonly set. This defines + a priority list which will be displayed on top of the event + type selection menu. Additionally the remaining (non-prioritized) + types are displayed with less contrast. + The order is preserved in the event editor as well as in the drop-down + list of the "Commit with additional options" dialog. + + + + + Sets the default magnitude aggregation method. It can be either "mean", + "trimmed mean" or "median". If not set, the default + behavior is used which computes the mean if less than 4 stations are available, + trimmed mean otherwise. 
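
The default aggregation just described (plain mean below four station magnitudes, trimmed mean otherwise) can be sketched as follows. The 25% trim fraction is taken from the scmag description earlier in this document, and the symmetric end-trimming shown here is one common variant, not necessarily the exact implementation.

    def aggregate(values, trim_fraction=0.25):
        """Mean for fewer than 4 station magnitudes, trimmed mean otherwise."""
        values = sorted(values)
        if len(values) < 4:
            return sum(values) / len(values)
        # Drop trim_fraction of the samples, split evenly between both ends.
        cut = int(len(values) * trim_fraction / 2)
        trimmed = values[cut:len(values) - cut] if cut else values
        return sum(trimmed) / len(trimmed)

    print(aggregate([4.0, 4.2, 4.4]))                           # plain mean: 4.2
    print(aggregate([3.0, 4.1, 4.2, 4.2, 4.3, 4.4, 4.5, 6.0]))  # outliers at both ends trimmed
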
+ + + + + Automatically starts computation of magnitudes after relocating an + origin. + + + + + Starts magnitude computation without user interaction if everything + runs without errors. In case of errors a window pops up with + the current state of processing. + + + + + If enabled, a magnitude selection pop window will open + in advance to compute magnitudes. The default selection + reflects the configuration parameter "magnitudes". + + + + + The velocity applied to reduce travel times in the + "MoveOut" plot of the Location tab. + + + + + Sets the drawing of lines in the map connecting the station with the event. + + + + + Sets the drawing of grid lines in the plot. + + + + + If a locator does not populate the take off angle in its arrivals, + the first motion plot will not show picked polarities. This option + defines whether to compute take off angles that are not present + in the arrivals or not. + + + + + The default value for adding unassociated stations in the picker. + + + + + If enabled, all traces without data and arrivals are + hidden in the picker. + + + + + If enabled, all traces of disabled stations without picks are hidden. + + + + + When creating an artificial origin this is the default depth used. + + + + + Adds advanced input parameters in artificial origin dialog. + + + + + Adds an icon to the system tray and displays a message + when a new event was received. Note that this feature + requires libQt4 version 4.3 or higher. + + + + + + + Maximum distance in degrees up to which also unassociated stations are + shown in the map. 360.0 deg which will + show all unassociated stations. + + + + + + + Default map radius in degrees if a new event is selected. + A negative value restores the default behavior. + + + + + + + + Defines the visible columns in the arrival table. Valid tokens are: + Used, Status, Phase, Weight, Method, Polarity, Takeoff, + Net, Sta, Loc/Cha, Timeres, Dis, Az, Time, +/-, + Slo, Slores, Baz, Bazres, Created, Latency + + + + + + + The default locator interface to use. + + + + + The default locator profile to use. + + + + + Minimum depth which is set if the locator used supports fixing + the depth. If the depth is lower than this value, the depth is + fixed to this configured value and the origin is relocated again. + + + + + Whether to preset the locator settings when an origin + is being loaded. It tries to figure the locator type + from Origin.methodID and the profile from Origin.earthModelID. + + + + + + + Activates the button "Force association with event" when + committing with additional options. This is just the default + setting and can be changed manually when the dialog pops up. + + + + + Activates the button "Fix this origin as preferred solution" when + committing with additional options. This is just the default + setting and can be changed manually when the dialog pops up. + + + + + Whether to return to the event list after committing with + additional options or not. + + + + + Turns the event comment input field into a drop down box + allowing the user to select from a list a predefined comment + values which may be changed if required. + + + + + + Configure custom commit buttons which + behave like shortcuts to "Commit with additional options". + Pressing a custom button does not open a popup window. Instead + it reads the configured profile and applies the options to the event + and origin. Each commit button can be coloured and labeled. + + + + + Enable this commit button. 
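Tying together the extra-button parameters described earlier in this section: button0/button1 carry the labels and scripts.script0/scripts.script1 the scripts they trigger, each called with OriginID and EventID. A minimal sketch with placeholder paths and labels:

    # scolv.cfg -- custom script buttons (sketch; paths and labels are placeholders)
    button0 = "Felt report"
    button1 = "Send alert"
    scripts.script0 = /path/to/felt_report.sh     # receives OriginID and EventID as arguments
    scripts.script1 = /path/to/send_alert.sh
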
+ + + + + Ask for confirmation by presenting the "commit with options" + dialog with the options pre-populated from this configuration. + Cancelling the dialog aborts the commit procedure. + + + + + Associate a new origin to the currently active event or not. + + + + + Fix the origin as preferred + solution in the event it is associated to. + + + + + The event type to be set of the event associated with + the committed origin. If unset, the type remains unchanged. + Use quotes for event types with spaces, e.g. + "not existing". + + + + + The event type certainty to be set of the event + associated with the committed origin. If unset, + the type certainty remains unchanged. + + + + + The status to be set of the origin being + committed. If unset, the status remains unchanged. + If empty, the origin status will be set to none. + + + + + Sets the preferred magnitude type of the event. If + unset, no changes will be requested. If empty, + the currently fixed preferred magnitude type will + be reset. + + + + + Optional event name which will be set. This option + exists for completeness and does not make much + sense to be applied on any event. Use quotes for event + names with spaces, e.g. "Quarry XYZ". + + + + + Optional event comment which will be set. Use quotes for event + comments with spaces, e.g. "Blast on Monday morning". + + + + + Return to the event list after committing or not. + + + + + The commit button label. Use quotes for labels with + spaces, e.g. "Earthquake reported". + + + + + Optional primary color for the bulk commit button. + Use hexadecimal values. + + + + + Optional text color for the bulk commit button. + Use hexadecimal values. + + + + + Add a summary of the actions to the button's tooltip. + + + + + + + Parameters controlling the phase picker window. + + + Activate a cross hair cursor showing the configured uncertainties + when in picking mode. + + + + + Load all existing picks for the requested data time window. + + + + + If enabled, the picker will not add stations that are not + configured with a detecStream in global bindings. + + + + + Limit the data acquisition to the number of nearest + stations given by "picker.limitStationAcquisitionCount". + + + + + Limit the data acquisituion to the given number of + the nearest stations. Requires to activate + "picker.limitStationAcquisition". + + + + + If enabled, acceleration data is loaded for co-located + stations where picked on velocity data. + + + + + Load all three components by default. If disabled, additional + components are fetched upon request. + + + + + If enabled, all three components are shown in the picker + zoom trace. The distance is limited to what is configured + in "picker.allComponentsMaximumDistance". + + + + + The distance limit for the "picker.showAllComponents" flag. + + + + + If enabled, data is requested around the P pick for each stations. + Otherwise origin time is used and the time window for all stations + is the same. + + + + + After picking and pushing the solution back to scolv all automatic arrivals + are removed from a station if a manual pick exists. + + + + + After picking and pushing the solution back to scolv all automatic arrivals + are removed from all stations regardless if a manual pick exists or not. + + + + + The data time span added to the left of either origin time or + first arrival depending on the usePerStreamTimeWindows flag. + + + + + The data time span added to the right of the requested time window. + + + + + Minimum data time window length. 
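The picker data-acquisition switches above use key names given verbatim in the descriptions; only the values in this sketch are illustrative:

    # scolv.cfg -- picker data acquisition (illustrative values)
    picker.limitStationAcquisition = true        # only request data for the nearest stations ...
    picker.limitStationAcquisitionCount = 40     # ... limited to this many stations
    picker.showAllComponents = true              # show all three components in the zoom trace ...
    picker.allComponentsMaximumDistance = 10     # ... up to this distance (value/unit illustrative)
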
+ + + + + Defines the relative position of the alignment when traces are + aligned at a phase. This value is bound + to interval [0:1] where 0 is the left side and 1 is right + side of the trace window. + + + + + Start of the re-picker time window relative the cursor position on the trace. + + + + + End of the re-picker time window relative the cursor position on the trace. + + + + + List of filters available in the picker. Format: + "name1;filter-definition1", "name2;filter-definition2" + + + + + A list of channel codes to be used when searching for + velocity channels. Usually this information is part of the + meta data. In case it is not, this list is used. + + + + + A list of channel codes to be used when searching for + acceleration channels. Usually this information is part of the + meta data. In case it is not, this list is used. + + + + + Phases for which theoretical arrival times are shown. + + + + + A list of phases that is available for picking in the Picking menu. + The phases are not accessible through hot keys as for + "picker.phases.favourites". + The phases are considered if the more complex phase group definition + "picker.phases.groups" is not provided. + + + + + List of pre-defined pick uncertainties. + The format of each list item is either a double value or a pair of + double values for asymmetric uncertainties, e.g. + "val1","(val2,val3)". + This parameter is only used if "picker.uncertainties.preferred" + is undefined. + + + + + Phases defined for picking. The phase phases can be selected + from the Picking menu of the picker window. + Make sure, the locator can handle all defined phases. + + + + Phase types available for rapid picking by hot keys and + in the Picking menu. Separate the types by comma. + + + + + List of defined phase type profiles to be considered. + Separate profiles by comma. + + + + + Definiton of phase type profiles. The profiles and phase types + can be selected from the Picking menu of the picker window + but not with hot keys. + + + + + List of phase types. Separate types by comma. + + + + + + + Define uncertainty profiles. Once defined, + they can be selected in the scolv picker settings [F3]. + The profiles override uncertainties defined in "uncertainties". + + + + Pick uncertainty profiles to be loaded. The first profile + will be used by default. + + + + + + + List of pre-defined pick uncertainties. + The format of each list item is either a double value or a pair of + double values for asymmetric uncertainties, e.g. + "val1","(val2,val3)". + + + + + + + + Settings related to waveform integration which may be + required if the desired waveform unit is changed and the + sensor data must be integrated once or multiple times. + + + + An optional pre-filter which is applied prior to the + numerical integration to stabilize the result. This + filter is applied for each integration step or just + once depending on the parameter "applyOnce". + Multiple integrations are necessary if acceleration + data should be converted to displacement. + + + + + Whether to apply the pre-filter only prior the first + integration step or prior to each step. + + + + + + Parameters controlling the amplitude picker window for computing station magnitudes. + + + Time span in seconds to be added to the left of the used + amplitude time window. + + + + + Time span in seconds to be added to the right of the used + amplitude time window. + + + + + A list of filters used for the amplitude picker. 
Format: + "name1;filter-definition1", "name2;filter-definition2" + + + + + + Display custom information in the Location tab. + + + + Origin information + + + + Name(s) of quality addon profile(s) to be considered. + The profiles provide values through a script which are + shown in the location tab. The scolv documentation + provides an example script. + + + + + Display origin comments. + + + + ID of the origin comment to be considered. + + + + + Value to be shown in case no valid origin comment is found. + + + + + Label of the value to be shown. + + + + + + + + Defines the label text of the information panel in the + Location tab of the parameter evaluated by this addon. + + + + + Path to script to execute. + + + + + + + + + + scolv [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + + + + + records#record-driver-list + records#record-url + records#record-file + records#record-type + + + + cities#city-xml + + + + gui#full-screen + gui#non-interactive + + + + + + + + + + + diff --git a/etc/descriptions/scorgls.xml b/etc/descriptions/scorgls.xml new file mode 100644 index 0000000..c06cd3c --- /dev/null +++ b/etc/descriptions/scorgls.xml @@ -0,0 +1,64 @@ + + + + List origin IDs from database. + + + scorgls [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + + + + + + + diff --git a/etc/descriptions/scqc.xml b/etc/descriptions/scqc.xml new file mode 100644 index 0000000..e9224d1 --- /dev/null +++ b/etc/descriptions/scqc.xml @@ -0,0 +1,886 @@ + + + + Derives quality control (QC) parameters from waveforms. + + + ID of QC message creator + + + + Limit QC determination to processed data streams which are + defined in global bindings with detecLocid and detecStream. + + + + + If useConfiguredStreams is true then this parameter decides whether + to use only the vertical component (default) or all three + components. The 3 components are collected from the inventory + according to channel orientation. If that is not possible + then the fixed components N and E will be used. + + + + + If useConfiguredStreams is false, then + load (from inventory) only those streams, matching the streamMask + + RegEx e.g. "^(NET1|NET2)\.(STA1|STA2|STA3)\.(LOC)\.((BH)|(LH)|(HH))Z$" + + RegEx e.g. "^(.+)\.(.+)\.(.*)\.(.+)Z$" + + streamMask = "^(.+)\.(.+)\.(.*)\.(BHZ)$" + + + + + Database look up for past entries not older than x days + (to determine the last QC parameter calculated). + + + + Control parameters for individual QC plugins. + + + Default configuration parameters for the plugins. 
+ Can be overwritten by the individual plugin configurations. + + + + Enable to use this plugin only for realtime processing. + + Default [disable]: plugin is able to process archived data AND realtime data streams. + + + + + Filter string used to filter waveforms before processing. + + + + + Qc-Buffer Length [s] (aka LTA-Buffer). Must be >= plugins.*.bufferLength. + + + + Control interactions with the messaging system. + + + Interval for sending report messages which e.g. + will be may be displayed by scqcv. + + + + Time window for calculating QC reports. + + + + Report messages are generated in case no data + is received since timeout. Only in real-time processing. + + + + + + Control sending QC reports as notifier to the messaging where + scmaster will write them to the database. + + + + Interval to send the QC reports. + + Default [-1]: Do not send any QC report. This will prevent writing + QC reports into the database. + + + + Time window for calculating QC reports. + + + + Control for sending alerts based on QC reports. + + + Interval for checking and sending alert thresholds. + -1 disables threshold checking. + + + + Time window for calculating QC reports. + + + List of thresholds for sending QC reports. Use comma to separate values. + + + + + + Configuration parameters for the plugin qcplugin_latency. Overwrites the parameters from the default section. + + + + Enable to use this plugin only for realtime processing. + + Default [disable]: plugin is able to process archived data AND realtime data streams. + + + + + Filter string used to filter waveforms before processing. + + + + + Qc-Buffer Length [s] (aka LTA-Buffer). Must be >= plugins.*.bufferLength. + + + + Control interactions with the messaging system. + + + Interval for sending report messages which, e.g., + can be displayed by scqcv. + + + + Time window for calculating QC reports. + + + + Report messages are generated in case no data + is received since timeout. Only in real-time processing. + + + + + Control sending QC reports as notifier to the messaging where + scmaster will write them to the database. + + + + Interval to send the QC reports. + + Default [-1]: Do not send any QC report. This will prevent writing + QC reports into the database. + + + + Time window for calculating QC reports. + + + + Control for sending alerts based on QC reports. + + + Interval for checking and sending alert thresholds. + -1 disables threshold checking. + + + + Time window for calculating QC reports. + + + List of thresholds for sending QC reports. Use comma to separate values. + + + + + + Configuration parameters for the plugin qcplugin_delay. Overwrites the parameters from the default section. + + + + Enable to use this plugin only for realtime processing. + + Default [disable]: plugin is able to processd data AND realtime data streams. + + + + + Filter string used to filter waveforms before processing. + + + + + Qc-Buffer Length [s] (aka LTA-Buffer). Must be >= plugins.*.bufferLength. + + + + Control interactions with the messaging system. + + + Interval for sending report messages which e.g. + will be may be displayed by scqcv. + + + + Time window for calculating QC reports. + + + + Report messages are generated in case no data + is received since timeout. Only in real-time processing. + + + + + Control sending QC reports as notifier to the messaging where + scmaster will write them to the database. + + + + Interval to send the QC reports. + + Default [-1]: Do not send any QC report. 
This will prevent writing + QC reports into the database. + + + + Time window for calculating QC reports. + + + + Control for sending alerts based on QC reports. + + + Interval for checking and sending alert thresholds. + -1 disables threshold checking. + + + + Time window for calculating QC reports. + + + List of thresholds for sending QC reports. Use comma to separate values. + + + + + + Configuration parameters for the plugin qcplugin_gap. Overwrites the parameters from the default section. + + + + Enable to use this plugin only for realtime processing. + + Default [disable]: plugin is able to process archived data AND realtime data streams. + + + + + Filter string used to filter waveforms before processing. + + + + + Qc-Buffer Length [s] (aka LTA-Buffer). Must be >= plugins.*.bufferLength. + + + + Control interactions with the messaging system. + + + Interval for sending report messages which e.g. + will be may be displayed by scqcv. + + + + Time window for calculating QC reports. + + + + Report messages are generated in case no data + is received since timeout. Only in real-time processing. + + + + + Control sending QC reports as notifier to the messaging where + scmaster will write them to the database. + + + + Interval to send the QC reports. + + Default [-1]: Do not send any QC report. This will prevent writing + QC reports into the database. + + + + Time window for calculating QC reports. + + + + Control for sending alerts based on QC reports. + + + Interval for checking and sending alert thresholds. + -1 disables threshold checking. + + + + Time window for calculating QC reports. + + + List of thresholds for sending QC reports. Use comma to separate values. + + + + + + Configuration parameters for the plugin qcplugin_offset. Overwrites the parameters from the default section. + + + + Enable to use this plugin only for realtime processing. + + Default [disable]: plugin is able to process archived data AND realtime data streams. + + + + + Filter string used to filter waveforms before processing. + + + + + Qc-Buffer Length [s] (aka LTA-Buffer). Must be >= plugins.*.bufferLength. + + + + Control interactions with the messaging system. + + + Interval for sending report messages which e.g. + will be may be displayed by scqcv. + + + + Time window for calculating QC reports. + + + + Report messages are generated in case no data + is received since timeout. Only in real-time processing. + + + + + Control sending QC reports as notifier to the messaging where + scmaster will write them to the database. + + + + Interval to send the QC reports. + + Default [-1]: Do not send any QC report. This will prevent writing + QC reports into the database. + + + + Time window for calculating QC reports. + + + + Control for sending alerts based on QC reports. + + + Interval for checking and sending alert thresholds. + -1 disables threshold checking. + + + + Time window for calculating QC reports. + + + List of thresholds for sending QC reports. Use comma to separate values. + + + + + + Configuration parameters for the plugin qcplugin_overlap. Overwrites the parameters from the default section. + + + + Enable to use this plugin only for realtime processing. + + Default [disable]: plugin is able to process archived data AND realtime data streams. + + + + + Filter string used to filter waveforms before processing. + + + + + Qc-Buffer Length [s] (aka LTA-Buffer). Must be >= plugins.*.bufferLength. + + + + Control interactions with the messaging system. + + + Interval for sending report messages which e.g. 
+ will be may be displayed by scqcv. + + + + Time window for calculating QC reports. + + + + Report messages are generated in case no data + is received since timeout. Only in real-time processing. + + + + + Control sending QC reports as notifier to the messaging where + scmaster will write them to the database. + + + + Interval to send the QC reports. + + Default [-1]: Do not send any QC report. This will prevent writing + QC reports into the database. + + + + Time window for calculating QC reports. + + + + Control for sending alerts based on QC reports. + + + Interval for checking and sending alert thresholds. + -1 disables threshold checking. + + + + Time window for calculating QC reports. + + + List of thresholds for sending QC reports. Use comma to separate values. + + + + + + Configuration parameters for the plugin qcplugin_rms. Overwrites the parameters from the default section. + + + + Enable to use this plugin only for realtime processing. + + Default [disable]: plugin is able to process archived data AND realtime data streams. + + + + + Filter string used to filter waveforms before processing. + + + + + Qc-Buffer Length [s] (aka LTA-Buffer). Must be >= plugins.*.bufferLength. + + + + Control interactions with the messaging system. + + + Interval for sending report messages which e.g. + will be may be displayed by scqcv. + + + + Time window for calculating QC reports. + + + + Report messages are generated in case no data + is received since timeout. Only in real-time processing. + + + + + Control sending QC reports as notifier to the messaging where + scmaster will write them to the database. + + + + Interval to send the QC reports. + + Default [-1]: Do not send any QC report. This will prevent writing + QC reports into the database. + + + + Time window for calculating QC reports. + + + + Control for sending alerts based on QC reports. + + + Interval for checking and sending alert thresholds. + -1 disables threshold checking. + + + + Time window for calculating QC reports. + + + List of thresholds for sending QC reports. Use comma to separate values. + + + + + + Configuration parameters for the plugin qcplugin_spike. Overwrites the parameters from the default section. + + + + Enable to use this plugin only for realtime processing. + + Default [disable]: plugin is able to process archived data AND realtime data streams. + + + + + Filter string used to filter waveforms before processing. + + + + + Qc-Buffer Length [s] (aka LTA-Buffer). Must be >= plugins.*.bufferLength. + + + + Control interactions with the messaging system. + + + Interval for sending report messages which e.g. + will be may be displayed by scqcv. + + + + Time window for calculating QC reports. + + + + Report messages are generated in case no data + is received since timeout. Only in real-time processing. + + + + + Control sending QC reports as notifier to the messaging where + scmaster will write them to the database. + + + + Interval to send the QC reports. + + Default [-1]: Do not send any QC report. This will prevent writing + QC reports into the database. + + + + Time window for calculating QC reports. + + + + Control for sending alerts based on QC reports. + + + Interval for checking and sending alert thresholds. + -1 disables threshold checking. + + + + Time window for calculating QC reports. + + + List of thresholds for sending QC reports. Use comma to separate values. + + + + + + Configuration parameters for the plugin qcplugin_timing. Overwrites the parameters from the default section. 
+ + + + Enable to use this plugin only for realtime processing. + + Default [disable]: plugin is able to process archived data AND realtime data streams. + + + + + Filter string used to filter waveforms before processing. + + + + + Qc-Buffer Length [s] (aka LTA-Buffer). Must be >= plugins.*.bufferLength. + + + + Control interactions with the messaging system. + + + Interval for sending report messages which e.g. + will be may be displayed by scqcv. + + + + Time window for calculating QC reports. + + + + Report messages are generated in case no data + is received since timeout. Only in real-time processing. + + + + + Control sending QC reports as notifier to the messaging where + scmaster will write them to the database. + + + + Interval to send the QC reports. + + Default [-1]: Do not send any QC report. This will prevent writing + QC reports into the database. + + + + Time window for calculating QC reports. + + + + Control for sending alerts based on QC reports. + + + Interval for checking and sending alert thresholds. + -1 disables threshold checking. + + + + Time window for calculating QC reports. + + + List of thresholds for sending QC reports. Use comma to separate values. + + + + + + Configuration parameters for the plugin qcplugin_availability. Overwrites the parameters from the default section. + + + + Enable to use this plugin only for realtime processing. + + Default [disable]: plugin is able to process archived data AND realtime data streams. + + + + + Filter string used to filter waveforms before processing. + + + + + Qc-Buffer Length [s] (aka LTA-Buffer). Must be >= plugins.*.bufferLength. + + + + Control interactions with the messaging system. + + + Interval for sending report messages which e.g. + will be may be displayed by scqcv. + + + + Time window for calculating QC reports. + + + + Report messages are generated in case no data + is received since timeout. Only in real-time processing. + + + + + Control sending QC reports as notifier to the messaging where + scmaster will write them to the database. + + + + Interval to send the QC reports. + + Default [-1]: Do not send any QC report. This will prevent writing + QC reports into the database. + + + + Time window for calculating QC reports. + + + + Control for sending alerts based on QC reports. + + + Interval for checking and sending alert thresholds. + -1 disables threshold checking. + + + + Time window for calculating QC reports. + + + List of thresholds for sending QC reports. Use comma to separate values. + + + + + + Configuration parameters for the plugin qcplugin_outage. Overwrites the parameters from the default section. + + + + Enable to use this plugin only for realtime processing. + + Default [disable]: plugin is able to process archived data AND realtime data streams. + + + + + Filter string used to filter waveforms before processing. + + + + + Qc-Buffer Length [s] (aka LTA-Buffer). Must be >= plugins.*.bufferLength. + + + + + If there is a gap of more than notifyDB [s], write an OUTAGE entry into the database. + + + + Control interactions with the messaging system. + + + Interval for sending report messages which e.g. + will be may be displayed by scqcv. + + + + Time window for calculating QC reports. + + + + Report messages are generated in case no data + is received since timeout. Only in real-time processing. + + + + + Control sending QC reports as notifier to the messaging where + scmaster will write them to the database. + + + + Interval to send the QC reports. + + Default [-1]: Do not send any QC report. 
This will prevent writing + QC reports into the database. + + + + Time window for calculating QC reports. + + + + Control for sending alerts based on QC reports. + + + Interval for checking and sending alert thresholds. + -1 disables threshold checking. + + + + Time window for calculating QC reports. + + + List of thresholds for sending QC reports. Use comma to separate values. + + + + + + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + + + + + records#record-driver-list + records#record-url + records#record-file + records#record-type + + + + diff --git a/etc/descriptions/scqcv.xml b/etc/descriptions/scqcv.xml new file mode 100644 index 0000000..bae166f --- /dev/null +++ b/etc/descriptions/scqcv.xml @@ -0,0 +1,1758 @@ + + + + Quality control view. + + + + List of QC parameters to be displayed in the details table. + Read the scqc documentation for a list of available QC parameters + and the default configuration of scqcv + $SEISCOMP_ROOT/etc/defaults/scqcv.cfg for more examples. + + Format: "Parameter name : ConfigName" + + Example: "delay : delay","spikes count : spike","spikes amplitude : + spikeAmplitude". + Refer to the parameters by their ConfigName to configure the + attributes. + + + + + + + The list of channel codes to be displayed. List items may + contain wildcards and are separated by comma. + The list is intersected with all channels configured in + inventory. + + Examples: + + default : display all streams configured by global bindings + + default, PF.BON.00.HH? : display default and all HH streams + of PF.BON.00 + + + + + Add new streams automatically to the streams configured in + streams.codes when waveform QC parameters are provided for + these streams. + + + + + + + Control the stream widget which opens when clicking on a stream + name in the detailed table. + + + + Length of data to be displayed. + + + + + + + Parameters controlling the overall QC score of a station. + The score is computed from the valid count parameter of the + defined ranges. It is vizualized in the QcOverview. + + + + + Ranges to be considered for forming the score. Set the + count parameter om the respective range section for + controlling the contribution of this range. The + range parameters control the coloring. + + + + + + + + Default configuration for all QC parameters. The configuration + can be extended to each QC parameter by placing "default" with + the parameter name in scqcv.cfg. Examples for some QC parameters + are given below. + + Read the scqc documentation for a list of available QC + parameters and the default configuration of scqcv + $SEISCOMP_ROOT/etc/defaults/scqcv.cfg for more examples. + + + + Names of range profile to be considered. + The range profiles define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. 
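A minimal scqcv.cfg sketch combining the details-table parameter list and the stream selection described at the top of this scqcv section. The "Parameter name : ConfigName" pairs and the PF.BON.00.HH? example are quoted from the descriptions; the key name "parameter" for the list itself is an assumption:

    # scqcv.cfg -- sketch; the key name "parameter" is assumed
    parameter = "delay : delay", "spikes count : spike", "spikes amplitude : spikeAmplitude"
    streams.codes = default, PF.BON.00.HH?   # bindings streams plus all HH streams of PF.BON.00
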
+ + + + + Default time in seconds, how long a value is displayed in + scqcv if no update is received. Setting to 0 means, show + value until updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter + 'availability' + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in + scqcv if no update is received. Setting to 0 means, show + value until updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter 'delay' + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in + scqcv if no update is received. Setting to 0 means, show + value until updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter + 'gaps count'. + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. 
Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in + scqcv if no update is received. Setting to 0 means, show + value until updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter + 'gaps length' + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in + scqcv if no update is received. Setting to 0 means, show + value until updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream. + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane. + + + + + Value interval for range inter. + + + + + Value interval for range bad. + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter 'gaps interval' + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in scqcv + if no update is received. Setting to 0 means, show value until + updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. 
+ + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter 'latency' + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in scqcv + if no update is received. Setting to 0 means, show value until + updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter 'offset' + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in scqcv + if no update is received. Setting to 0 means, show value until + updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter 'overlaps count' + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in scqcv + if no update is received. Setting to 0 means, show value until + updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. 
+ + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter 'overlaps interval' + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in + scqcv if no update is received. Setting to 0 means, show + value until updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter + 'overlaps length' + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in + scqcv if no update is received. Setting to 0 means, show + value until updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter 'rms' + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in + scqcv if no update is received. Setting to 0 means, show + value until updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). 
+ + + + + Possible values: enableStream, disableStream + + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter + 'spikes count' + + + + Names of range profile to be considered. + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in + scqcv if no update is received. Setting to 0 means, show + value until updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter + 'spikes amplitude'. + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in + scqcv if no update is received. Setting to 0 means, show + value until updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter + 'spikes interval'. + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. 
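The same range/score structure repeats for every QC parameter above and below: a parameter references range profiles, each referenced range has a value interval, and its count contributes to the station score (compare the [parameter].range.[range profile].count notation further below). A purely schematic sketch; none of these key names are verified against the actual scqcv schema:

    # scqcv.cfg -- hypothetical range override for the 'delay' parameter
    delay.ranges = sane, bad                 # range profiles considered (key names assumed)
    delay.range.sane = 0, 60                 # value interval for range "sane" (illustrative)
    delay.range.sane.count = 1               # contribution to the station score
    delay.range.bad = 60, 86400              # value interval for range "bad"
    delay.range.bad.count = 0
    delay.range.bad.color = red              # a color from the color definitions
    delay.range.bad.action = disableStream   # enableStream or disableStream
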
+ + + + + Default time in seconds, how long a value is displayed in + scqcv if no update is received. Setting to 0 means, show + value until updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Parameters controlling the display of the QC parameter + 'timing quality'. + + + + Names of range profile to be considered + The range profiles s define the background color table + fields depending on the field value. Add the default + ranges for which different intervals and the color are + configured. + + + + + Values are: int, float, percent, timeSpan + + Displays raw values if unset. + + + + + Default time in seconds, how long a value is displayed in + scqcv if no update is received. Setting to 0 means, show + value until updated. + + + + + A color defined by the color definitions below. + + + + + Activate to display absolute values (modulus). + + + + + Possible values: enableStream, disableStream + + + + + Range parameters overriding parameters from the general + range section below. Unset values or range profiles + undefined here will be inherited from the general range + section. + + + + Value interval for range sane + + + + + Value interval for range inter + + + + + Value interval for range bad + + + + + + Contribution of this range for computing the score. + The range must be considered. + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + + Range parameters defining default values which can be overridden + per QC parameter in the section of this parameter. + More ranges can be added by adjusting scqcv.cfg e.g. in + @SYSTEMCONFIGDIR@/. + + + + Value interval for range sane. + + + + + Value interval for range inter. + + + + + Value interval for range bad. + + + + + Value interval for range profile above. + + + + + Value interval for range profile below. + + + + + Value interval for range profile delay. + + + + + Value interval for range profile latency. + + + + + Value interval for range profile timing. + + + + + Value interval for range profile unset. + + + + + Value interval for range profile r01. + + + + + Value interval for range profile r02. + + + + + Value interval for range profile r03. + + + + + Value interval for range profile r04. + + + + + Value interval for range profile r05. + + + + + Value interval for range profile r06. + + + + + Value interval for range profile r07. + + + + + Value interval for range profile r08. + + + + + Value interval for range profile r09. + + + + + Value interval for range profile r10. + + + + + + Contribution of this range for computing the score. + The range must be considered. The count is also set in + [parameter].range.[range profile].count . + + + + + A color defined by the color definitions below. + + + + + Possible values: enableStream, disableStream + + + + + + + + Color definitions. 
More colors can be added by adjusting + scqcv.cfg e.g. in @SYSTEMCONFIGDIR@/. + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + Color names or R, G, B, alpha values + + + + + + + Format describtions for showing values. The format parameter + is defined seprately for earch parameter but the actual format + can be adjusted here. + + + + Number of decimal values shown for parameters of format float. + + + + + + + scqcv [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + generic#print-config-vars + generic#validate-schema-params + generic#dump-settings + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + verbosity#print-context + verbosity#print-component + verbosity#log-utc + verbosity#trace + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + records#record-driver-list + records#record-url + records#record-file + records#record-type + + + + cities#city-xml + + + + gui#full-screen + gui#non-interactive + + + + + diff --git a/etc/descriptions/scquery.xml b/etc/descriptions/scquery.xml new file mode 100644 index 0000000..5a8f742 --- /dev/null +++ b/etc/descriptions/scquery.xml @@ -0,0 +1,68 @@ + + + + + Read database objects and writes them to the command line. + + + + + + scquery [OPTIONS] [query name] [query parameters] + + + generic#help + generic#version + generic#config-file + generic#plugins + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + database#db-driver-list + database#database + + + + + + + + + + + + diff --git a/etc/descriptions/scqueryqc.xml b/etc/descriptions/scqueryqc.xml new file mode 100644 index 0000000..81bc000 --- /dev/null +++ b/etc/descriptions/scqueryqc.xml @@ -0,0 +1,85 @@ + + + + + Query waveform quality control (QC) parameters from database. 
+ + + + scqueryqc [options] + + + generic#help + generic#version + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + database#db-driver-list + database#database + database#inventory-db + + + + + + + + + + + + + + + + + diff --git a/etc/descriptions/screloc.xml b/etc/descriptions/screloc.xml new file mode 100644 index 0000000..c96bf7c --- /dev/null +++ b/etc/descriptions/screloc.xml @@ -0,0 +1,153 @@ + + + + Automatic relocator. + + + + + Defines the locator to be used such as NonLinLoc. + + + + + The locator profile to use. + + + + + Ignores origins with status REJECTED. + + + + + Triggers processing also on origins with status PRELIMINARY. + + + + + Triggers processing also on origins with mode MANUAL. Otherwise + only origins with mode AUTOMATIC are considered. + + + + + Whether to use the weight of the picks associated with the + input origin as defined in the input origin as input for + the locator or not. If false then all picks associated with + the input origin will be forwarded to the locator with full + weight. + + + + + If the input origin's depth is fixed then it will be fixed + during the relocation process as well. + + + + + Whether to store the ID of the input origin as comment in the + relocated origin or not. + + + + + Suffix appended to the publicID of the origin to be relocated + to form the new publicID. This + helps to identify pairs of origins before and after relocation. + However, new publicIDs are unrelated to the time of creation. + If not defined, a new publicID will be generated automatically. + + + + + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + + + + + + + + + + + + + + + diff --git a/etc/descriptions/scrttv.xml b/etc/descriptions/scrttv.xml new file mode 100644 index 0000000..6dc233c --- /dev/null +++ b/etc/descriptions/scrttv.xml @@ -0,0 +1,476 @@ + + + + Real-time trace view. + + + + If greater than 0, then all traces for which the data latency is + higher than this value are hidden. + + + + + If enabled, then all traces are sorted by distance when a new + origin arrives. + + + + + If enabled, picks are shown. + + + + + Define the filter to be used when filtering is activated. This + is only being used if filters is not set otherwise it is + overridden. This option is mainly for backward compatibility. + + + + + Define a list of filters that is cycles through when pressing 'f'. + This options supersedes the filter option. If not defined then + filter is used instead. If defined then this filter list is + used exclusively and the filter option is ignored. + + + + + Activate the first filter of the configured filter list + after startup. This is equivalent to pressing 'f'. + + + + + Define the buffer size in seconds of the ring bu of each trace. 
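The stream selection and filter cycling described above become plain entries in scrttv.cfg. streams.codes and the PF.BON.00.HH? example come from the descriptions; the filters key name and the filter grammar shown are assumptions:

    # scrttv.cfg -- sketch; filter names/definitions are illustrative
    streams.codes = default, PF.BON.00.HH?   # bindings streams plus all HH streams of PF.BON.00
    filters = "BP 0.5-8 Hz;BW(3,0.5,8)", "HP 1 Hz;BW_HP(3,1)"   # list cycled through with 'f'
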
+ + + + + If set to true, all traces will be visible on application startup + independent of data availability. + + + + + Time span in seconds to switch back to the last view after an origin + caused resorting. The default is 15min. + + + + + + Configure the initial stream sorting. + + + + The sort mode applied initially. Allowed values + are: config, distance, station, network, group. + + + + + Latitude of the initial location for sorting traces. + Only valid if mode == distance. + + + + + Longitude of the initial location for sorting traces. + Only valid if mode == distance. + + + + + + The list of channel codes to be displayed. List items + may contain wildcards at any position and are separated + by comma. Wildcard support depends on RecordStream, + e.g.: + + caps/sdsarchive: *.*.*.* + + slink: NET.STA.*.* + + The channel list is intersected with all channels + configured in inventory unless --no-inventory is used. + + Examples: + + default : display all streams configured by global + bindings + + default, PF.BON.00.HH? : display default and all HH + streams of PF.BON.00 + + + + + If not empty then all stream patterns are part of the blacklist. + The blacklist is only active if "streams.codes" + is omitted and the default stream list according to the + bindings is to be shown. Each pattern can include wildcards + (either ? or *). The pattern is checked against the channel + id which is a concatenation of network code, station code, + location code and channel code separated with a dot, + e.g. "GE.MORC..BHZ". + + + + + Number of rows to show at once in one windows. If more traces + than rows are loaded, the are accessible by a scroll bar. + + + + + Define a region used for clipping requested stations. + + + + Minimum longitude. + + + + + Maximum longitude. + + + + + Minimum latitude. + + + + + Maximum latitude. + + + + + + Stream group profiles to be considered which must be defined in + group section. Use comma separation for a list of groups. + + + + + Stream profiles to be considered which must be defined in + profile section. Use comma separation for a list of profiles. + + + + + + Definiton of stream groups shown in scrttv with unique features. + Register the profiles in "groups" to apply them. + + + + + List of channels codes to be displayed within + this group. List items may contain wildcards at any position + and are separated by comma. + The list is intersected with all channels configured in inventory. + + Example: + + CX.*..BH?,PF.BON.00.HH? : all BH streams of the CX network + and all HH streams of PF.BON.00 + + + + + Title of the group visible as a tooltip of the traces. + + + + + Define the trace pen of the group. + + + + The color of the pen. If not given, the default + trace color is being used. The parameter is overridden + by "streams.group.$profile.gradient" . + + + + + Define the color gradient used to generate the + trace color for each group member. When given, the + value in "streams.group.$profile.pen.color" + is ignored. The colors are distributed equally and + given in hexadecimal representation or by or + :term:`color keyword names`. + The stop points + can be set at any value. The final trace color + will be interpolated from the normalized gradient + where the value range is scaled to [0,1]. + + Format: value1:color1,value2:color2 + + Examples: + + 0:yellow,1:green + + 0:FFBF00,1:C70039 + + + + + The style of the pen. Supported values are: NoPen, + SolidLine, DashLine, DotLine, DashDotLine, + DashDotDotLine. + + + + + The width of the pen. 
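To illustrate the gradient format described above ("value1:color1,value2:color2", normalized to [0,1]), here is a small plain-Python sketch that parses hexadecimal stops and interpolates an RGB color for a normalized trace position. This is an illustration only, not the scrttv implementation, and named color keywords are ignored.

def parse_gradient(spec):
    # Parse "value:hexcolor" stops, e.g. "0:FFBF00,1:C70039".
    stops = []
    for item in spec.split(","):
        value, color = item.strip().split(":")
        r, g, b = (int(color[i:i + 2], 16) for i in (0, 2, 4))
        stops.append((float(value), (r, g, b)))
    return sorted(stops)

def interpolate(stops, x):
    # Clamp to the outermost stops, otherwise interpolate linearly.
    if x <= stops[0][0]:
        return stops[0][1]
    if x >= stops[-1][0]:
        return stops[-1][1]
    for (v0, c0), (v1, c1) in zip(stops, stops[1:]):
        if v0 <= x <= v1:
            t = (x - v0) / (v1 - v0)
            return tuple(round(a + t * (b - a)) for a, b in zip(c0, c1))

stops = parse_gradient("0:FFBF00,1:C70039")
print(interpolate(stops, 0.5))  # color for the middle trace of the group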
+ + + + + + + + + Definiton of profiles for streams shown with unique decorations. + Register the profiles in "profiles" to apply them. + + + + + Stream applying this profile to. Use regular expressions + for defining multiple streams. Examples: + + GR.BFO.00.BHZ: One stream on vertical component + + + GR.*.00.BHZ: All stations from GR network and their + vertical components. + + + + + Text added to streams. + + + + + Fraction of trace amplitude added to trace widgets. + + + + + The physical unit shown along with stream maximum and + minimum values. + + + + + The gain applied to scale trace amplitudes. 0 disables + showing trace amplitude value + + + + + + + + + Properties defining the minimum line on each trace. + + + + Value at which to draw a line. + + + + + Line properties. + + + + + + + + + + + + + + + + + Properties of the area below the minimum. + + + + + + + + + + + + + + Properties defining the maximum line on each trace. + + + + Value at which to draw a line. + + + + + Line properties. + + + + + + + + + + + + + + + + + Properties of the area above the maximum. + + + + + + + + + + + + + + + + + + scrttv [options] [miniSEED file] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + generic#print-config-vars + generic#validate-schema-params + generic#dump-settings + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + verbosity#print-context + verbosity#print-component + verbosity#log-utc + verbosity#trace + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + records#record-driver-list + records#record-url + records#record-file + records#record-type + + + + + + + + + + + + + cities#city-xml + + + + gui#full-screen + gui#non-interactive + + + + diff --git a/etc/descriptions/scsendjournal.xml b/etc/descriptions/scsendjournal.xml new file mode 100644 index 0000000..89f6092 --- /dev/null +++ b/etc/descriptions/scsendjournal.xml @@ -0,0 +1,48 @@ + + + + + Send journaling information to the messaging to manipulate event parameter objects. + + + + scsendjournal [opts] {objectID} {action} [parameters] + + + + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#encoding + messaging#start-stop-msg + + + + diff --git a/etc/descriptions/scsendorigin.xml b/etc/descriptions/scsendorigin.xml new file mode 100644 index 0000000..f10c0d0 --- /dev/null +++ b/etc/descriptions/scsendorigin.xml @@ -0,0 +1,60 @@ + + + + + Create an artificial origin and send to the messaging. 
+ + + + scsendorigin [options] + + + + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#encoding + messaging#start-stop-msg + + + + + + + + diff --git a/etc/descriptions/scsohlog.xml b/etc/descriptions/scsohlog.xml new file mode 100644 index 0000000..cc92bba --- /dev/null +++ b/etc/descriptions/scsohlog.xml @@ -0,0 +1,87 @@ + + + + + Connect to the messaging and collect all information sent from connected + clients. + + + + scsohlog [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + + + + + + + + + + + + The output interval in seconds. + + + + + The output XML file. + + + + + The script to execute. + Example: @CONFIGDIR@/scripts/scsohlog_script.sh. + + + + + + + diff --git a/etc/descriptions/scvoice.xml b/etc/descriptions/scvoice.xml new file mode 100644 index 0000000..5462a7a --- /dev/null +++ b/etc/descriptions/scvoice.xml @@ -0,0 +1,142 @@ + + + + Real time voice alert. + + + + Treat an event as new event when it is seen the first time. + + + + + Define a white list of agencyIDs to alert an event. The + agencyID is extracted from the preferred origin of the event. + If this list is empty, all agencies are allowed. + + + + + + The default message string for the event-script is + "earthquake, [HOURS] hours [MINS] minutes ago, [DESC], + magnitude [MAG], depth [DEP] kilometers" whereas [DESC] + is the string given in the event.description attribute. + This string can be overwritten using one of the following + options. There are three placeholders that can be used: + @region@, @dist@ and @poi@. + Example: "@region@, @dist@ kilometers from @poi@ away". + + + + + When using the nearest point of interest (city) as part of the + message string, specify the maximum distance in degrees from the + event. Any point of interest farther away will be ignored. + + + + + Minimum population for a city to become a point of interest. + + + + + + + Specify the script to be called if an amplitude + arrives, network-, stationcode and amplitude are passed + as parameters $1, $2 and $3. + + + + + Specify the script to be called if a preliminary + origin arrives, latitude and longitude are passed as + parameters $1 and $2. + + + + + Specify the script to be called when an event has been + declared; the message string, a flag (1=new event, + 0=update event), the EventID, the arrival count and the + magnitude (optional when set) are passed as parameter + $1, $2, $3, $4 and $5. 
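As an illustration of the calling convention described above, a hypothetical event script could look like the following plain-Python sketch. The text-to-speech call is only hinted at in a comment, since the actual tool depends on the system.

#!/usr/bin/env python3
# Hypothetical scvoice event script (illustration only). scvoice passes:
#   $1 message string, $2 flag (1 = new event, 0 = update), $3 eventID,
#   $4 arrival count, $5 magnitude (optional).
import sys

def main(argv):
    if len(argv) < 5:
        print("usage: event_script <message> <flag> <eventID> <arrivals> [magnitude]")
        return 1
    message = argv[1]
    is_new = argv[2] == "1"
    event_id = argv[3]
    arrivals = argv[4]
    magnitude = argv[5] if len(argv) > 5 else "unknown"
    kind = "new event" if is_new else "update"
    print(f"[{kind}] {event_id}: {message} ({arrivals} arrivals, M {magnitude})")
    # A real script might hand the message to a text-to-speech tool here,
    # e.g. subprocess.call(["espeak", message])  -- an assumption, not required.
    return 0

if __name__ == "__main__":
    sys.exit(main(sys.argv))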
+ + + + + + + scvoice [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + + + + + + + + + + + diff --git a/etc/descriptions/scwfas.xml b/etc/descriptions/scwfas.xml new file mode 100644 index 0000000..f809d0c --- /dev/null +++ b/etc/descriptions/scwfas.xml @@ -0,0 +1,86 @@ + + + + Waveform archive server + + + scwfas [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#print-component + verbosity#print-context + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#trace + verbosity#log-file + + + + + + + + + + Defines an alternative SDS archive handler. This is the name + of an RecordStream interface that can be loaded via a plugin. + If not given, an internal implementation will be used. + + + + + The filebase of the SDS archive. If an alternative archive + handler is defined by "handlerSDS", this value serves + as input to setSource(). + + + + + + The server port for Arclink connections. -1 + deactivates the Arclink server. The standard Arclink port is + 18001. + + + + + + + The server port for FDSNWS connections. -1 + deactivates the FDSN Web server. + + + + + The base URL of the FDSN Web service that is + given in the WADL document. + + + + + The aggregated maximum time window (seconds) + for all requested streams. A value of 0 will deactive + any restriction. + + + + + + diff --git a/etc/descriptions/scwfparam.xml b/etc/descriptions/scwfparam.xml new file mode 100644 index 0000000..ca003e0 --- /dev/null +++ b/etc/descriptions/scwfparam.xml @@ -0,0 +1,575 @@ + + + + Waveform parametrization module. + + + + + The path to the processing info logfile. + + + + + Defines the white- and blacklist of data streams to be used. The + rules to decide if a stream is used or not are the following: + + 1. if whitelist is not empty and the stream is not on the whitelist, + don't use it, ok otherwise + + 2. if blacklist is not empty and the stream is on the blacklist, + don't use it, ok otherwise + + Both checks are made and combined with AND. Either whitelist or + blacklist contains a list of patterns (wildcard allowed as * and ?), + eg GE.*.*.*, *, GE.MORC.*.BH? Each stream id (NET.STA.LOC.CHA) will + be checked against the defined patterns. + + + The stream whitelist + + + The stream blacklist + + + + + Default value of total time window length in seconds if + wfparam.magnitudeTimeWindowTable is not specified. This times window + includes wfparam.preEventWindowLength. + + + + + Magnitude dependent time window table. The format is + "mag1:secs1, mag2:secs2, mag3:secs3". If a magnitude falls + between two configured magnitudes the time window of the lower + magnitude is used then. No interpolation is performed. Magnitude + outside the configured range are clipped to the lowest/highest value. 
+ Example: "3:100, 4:200, 5:300" + + + + + The pre event time window length in seconds. + + + + + The maximum epicentral distance in km of a station being considered + for processing. This value is used if wfparam.magnitudeDistanceTable + is not specified. + + + + + Analogue to wfparam.magnitudeTimeWindowTable but instead giving a + time window, the distance in km is specified. + Example: "3:400, 4:450, 5:500" + + + + + Relative saturation threshold in percent. If the absolute raw amplitude + exceeds X% of 2**23 counts the station will be excluded from + processing. This assumes a 24bit datalogger. + + + + + Specifies the STA length in seconds of the applied STA/LTA check. + + + + + Specifies the LTA length in seconds of the applied STA/LTA check. + + + + + Specifies the minimum STALTA ratio to be reached to further process + a station. + + + + + Specifies the number of seconds around P to be used to check the STA/LTA ratio. + + + + + Defines the factor applied to the signigicant duration to define the + processing spetra time window. If that value is <= 0 the totalTimeWindowLength + is used. + + + + + Specifies a list of damping values (in percent) for computation of + the relative displacement elastic response spectrum. + Example: "5,10,15" + + + + + Specifies the number of natural periods for computation of the + relative displacement elastic response spectrum between Tmin and Tmax. + If fixed is given then a fixed list of periods is used. + + + + + + Defines if a linear spacing or logarithmic spacing between Tmin and + Tmax is used. The default is a linear spacing. The logarithmic + spacing will fail if either Tmin or Tmax is 0. + + + + + + Specifies the minimum period (Tmin) in seconds for computation of the + relative displacement elastic response spectrum. + + + + + Specifies the maximum period (Tmax) in seconds for computation of the + relative displacement elastic response spectrum. + + + + + Should the maximum period (Tmax) clipped against the + configured filter lower corner frequency, the maximum of + pd.loFreq or filter.loFreq. + + + + + Enables/disables aftershock removal (Figini, 2006; Paolucci et al., 2008) + + + + + Enables/disables pre-event cut-off. A hardcoded sta/lta algorithm + (with sta=0.1s, lta=2s, sta/lta threshold=1.2) is run on the time + window defined by (expected_P_arrival_time - 15 s). The pre-event + window is hence defined as + [t(sta/lta =1.2) - 15.5s, t(sta/lta =1.2) - 0.5s]. + + + + Magnitude dependent filter table. The format is + "mag1:fmin1;fmax1, mag2:fmin2;fmax2, mag3:fmin3;fmax3". + If a magnitude falls between two configured magnitudes the filter of + the lower magnitude is then used. No interpolation takes place. + Magnitude outside the configured range are clipped to the + lowest/highest value. + Frequency values are given as simple positive doubles (Hz is assumed) + or with suffix "fNyquist" which is then multiplied by the + Nyquist frequency of the data to get the final corner frequency. + + + + Parameters of the 1st stage filter. + + + + Specifies the order of the 1st stage filter. + + + + + Specifies the frequency of the 1st stage hi-pass filter. If this + parameter is equal to 0 the hi-pass filter is not used. + If suffix "fNyquist" is used then the value is multiplied + by the Nyquist frequency of the data to get the final corner + frequency of the filter. + + + + + Specifies the frequency of the 1st stage lo-pass filter. If this + parameter is equal to 0 the lo-pass filter is not used. 
+ If suffix "fNyquist" is used then the value is multiplied + by the Nyquist frequency of the data to get the final corner + frequency of the filter. + + + + + + Parameters of the post-deconvolution filter applied in the + frequency domain. + + + + Specifies the order of the 2nd stage filter. + + + + + Specifies the frequency of the 2nd stage hi-pass filter. If this + parameter is equal to 0 the hi-pass filter is not used. + If suffix "fNyquist" is used then the value is multiplied + by the Nyquist frequency of the data to get the final corner + frequency of the filter. + + + + + Specifies the frequency of the 2nd stage lo-pass filter. If this + parameter is equal to 0 the lo-pass filter is not used. + If suffix "fNyquist" is used then the value is multiplied + by the Nyquist frequency of the data to get the final corner + frequency of the filter. + + + + + + Enables/disables deconvolution. If a channel does not provide full + response information it is not used for processing. + + + + + + Enables non-causal filtering in the frequency domain. + + + + + Defines the cosine taper length in seconds if non-causal filters + are activated applied on either side of the waveform. If a + negative length is given 10 percent of the pre-event window length + is used on either side of the waveform. + + + + + The length of the zero padding window in seconds applied on either + side of the waveform if non-causal filters are activated. If + negative, it is computed following Boore (2005) as + 1.5*order/corner_freq and applied half at the beginning and half at + the end of the waveform. + + + + + + + Specifies the interval in seconds to check/start scheduled operations. + + + + + Specifies the maximum allowed idle time of a process before removed. + The idle time is calculated if no further processing is scheduled + and computes as: [now]-lastRun. + + + + + Enables/disables updating of a cron log file. This file will be + created at ~/.seiscomp/log/[appname].sched + and contains information about the scheduled events and the + processing queue. The file is updated each n seconds, + where n = wfparam.cron.wakeupInterval. + + + + + Specifies the delay in seconds to delay processing if a new + authoritative origin arrives for an event. + + + + + Specifies a list of delay times in seconds relative to event time + to trigger the processing. When the first origin of an event arrives + this list is used to construct the crontab for this event. + Example: "60, 120, 300, 3600" + + + + + + + Specifies the initial acquisition timeout. If the acquisition + source (e.g. Arclink) does not respond within this threshold with + waveforms, the request is discarded. + + + + + Specifies the acquisition timeout when waveforms are being + transfered. If no new waveforms arrive within this threshold, the + request is aborted. This is important if a Seedlink connection is + configured which can block the application for a very long time if + at least one requested channel has no data. Seedlink does not + finished the request until all data has been sent. When data will + arrive for a particular channel is not known. + + + + + + + Enables messaging output which creates objects of the + StrongMotionParameters data model extension (defined by SED) and + sends them to scmaster. In order to save the objects to the database, + scmaster needs to load the dmsm plugin and the corresponding database + schema must be applied. + The default message group is AMPLITUDE. To change this group redefine + connection.primaryGroup. 
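Referring back to the magnitude-dependent filter table and the "fNyquist" suffix described earlier for this module, the following plain-Python sketch shows one way such a table could be parsed and evaluated. The numeric values are made up; only the format follows the description, and this is not the scwfparam implementation.

def to_freq(token, nyquist):
    # Plain value in Hz, or a factor of the Nyquist frequency ("0.8fNyquist").
    token = token.strip()
    if token.endswith("fNyquist"):
        return float(token[:-len("fNyquist")]) * nyquist
    return float(token)

def parse_filter_table(spec, nyquist):
    # "mag1:fmin1;fmax1, mag2:fmin2;fmax2, ..."
    table = []
    for entry in spec.split(","):
        mag, freqs = entry.strip().split(":")
        fmin, fmax = (to_freq(f, nyquist) for f in freqs.split(";"))
        table.append((float(mag), fmin, fmax))
    return sorted(table)

def filter_for(table, magnitude):
    # Use the entry of the lower magnitude; clip outside the configured range.
    selected = table[0]
    for entry in table:
        if magnitude >= entry[0]:
            selected = entry
    return selected[1], selected[2]

table = parse_filter_table("3:0.2;0.8fNyquist, 5:0.05;20", nyquist=50.0)
print(filter_for(table, 4.2))  # -> (0.2, 40.0), the magnitude-3 filter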
+ + + + + Uses short event ids when an event output directory needs to be + created. The default pattern is [eventtime]_[mag]_[lat]_[lon]_[updatetime]. + The short format just contains the first part, namely [eventtime] in + the format YEARmmddHHMMSS. + + + + + + Enables/disables the output of processed waveforms. + + + + + Specifies the waveform output path. This parameter is only used if + wfparam.output.waveforms.enable is true. + + + + + Enables/disables the creation of an event directory (named with + eventID) when storing the processed waveforms. This parameter is + only used if wfparam.output.waveforms.enable is true. + + + + + + + Enables/disables the output of spectra (psa, drs). The output + format is a simple ASCII file where the first column is the + period and the second column the corresponding value. + + + + + Specifies the spectra output path. This parameter is only used if + wfparam.output.spectra.enable is true. + + + + + Enables/disables the creation of an event directory (named with + eventID) when storing the spectra. This parameter is only used if + wfparam.output.spectra.enable is true. + + + + + + + Enables/disables ShakeMap XML output. + + + + + Specifies the ShakeMap XML output path. This is only used if + wfparam.output.shakeMap.enable is set to true. + + + + + Specifies the path to a script that is called whenever a new + ShakeMap XML is available. The script is called with 3 parameters: + EventID, modified ShakeMap eventID, path to event directory (where + input/event.xml and input/event_dat.xml lives). + The event files are not deleted by the application. The ownership + goes to the called script. + + + + + Enables/disables synchronous or asynchronous script calls. If + enabled, be careful to not spend too much time in the script. + The application is blocked while the script is running. + + + + + If enabled the maximum PGV, PGA, PSA03, PSA10 and PSA30 of both + horizontal components is used in the final output. Otherwise each + component is saved. + + + + + Uses the SeisComP event publicID as id attribute of the + earthquake tag, a generated ShakeMapID otherwise. + + + + + Uses the event region name (if available) for the locstring + attribute, the publicID, lat, lon otherwise. + + + + + The XML encoding string written to the Shakemap XML file. + + + + + The target version of the Shakemap input files. + + + + + + + Defines the magnitude tolerance to completely reprocess an event with + respect to the last state. + + + + + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + records#record-driver-list + records#record-url + records#record-file + records#record-type + + + + --dump-records Dumps all received records (binary) to recs.in + --dump-config Dump the configuration and exit + + + + + + + + + + + + + + + + + + + + + + + Defines the saturation threshold for the optional saturation check. 
+ By default the saturation check is configured for all stations + as module parameter. This parameters overrides the threshold + per station. + + This value can either be an absolute value such as "100000" + or a relative value (optionally in percent) with respect to + the number of effective bits, e.g. "0.8@23" or + "80%@23". The first version uses 1**23 * 0.8 + whereas the latter uses 1**23 * 80/100. + + The special value "false" explicitly disables + the check. + + + + + + + diff --git a/etc/descriptions/scxmldump.xml b/etc/descriptions/scxmldump.xml new file mode 100644 index 0000000..2884487 --- /dev/null +++ b/etc/descriptions/scxmldump.xml @@ -0,0 +1,178 @@ + + + + + Dump database objects to XML files. + + + + scxmldump [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#config-db + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/etc/descriptions/sczip.xml b/etc/descriptions/sczip.xml new file mode 100644 index 0000000..bda7863 --- /dev/null +++ b/etc/descriptions/sczip.xml @@ -0,0 +1,47 @@ + + + + Compress or expand files with ZIP algorithm + + + sczip file.dat -o file.zip + sczip file.dat -d -o file.zip + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + + + + + + diff --git a/etc/descriptions/seedlink.xml b/etc/descriptions/seedlink.xml new file mode 100644 index 0000000..e130fdf --- /dev/null +++ b/etc/descriptions/seedlink.xml @@ -0,0 +1,264 @@ + + + + Real-time waveform server implementing the SeedLink protocol. + + + + Defines the Seedlink port to accept requests. If this port + is changed and other modules depend on it (slarchive, processing) + those ports must be updated as well! + + + + + Path to the base directory of SeedLink data files (disk buffer). + + + + + Default network code. Used when a network code is omitted by a client + in STATION request. Should be set to the network code of the majority + of configured stations. 1 or 2 characters long, uppercase. + + + + + List of trusted addresses. + + + + + List of IP addresses or IP/mask pairs (in ipchains/iptables syntax) + that can access stations. Per station access definitions + supersede this parameter. By default any client can access + all stations. + + + + + Check start and end times of streams. + + + + + If stream_check = enabled, also check for gaps in all channels that + match given pattern. Register all gaps that are larger than +-0.5 seconds. + gap_check_pattern = [EBLV][HLNG][ZNE]|S[NG][ZNE]. + Disabled to save memory. + + + + + Time difference between records (microseconds) above which a gap is declared. + + + + + Can be enabled or disabled. 
Required for slinktool option -tw. + + + + + Same as window_extraction for trusted IP addresses. + + + + + Allow websocket connections. + + + + + Same as websocket for trusted IP addresses. + + + + + If activated Seedlink uses the mseedfifo to read records and + only the mseedfifo_plugin is started. This command is useful + to playback historic data by eg msrtsimul. + + + + + Defines a database read connection to be used for Seedlink + station descriptions. If no database is to be used (the default; + configure with "") then the station code will be used. Example: + "mysql://sysop:sysop@localhost/seiscomp". If a remote host is + specified, ensure that its database server is reachable from + this computer. + + + + + Info level provided to arbitrary hosts. + + + + + Info level provided to trusted hosts. + + + + + Whether requests should be logged. + + + + + Give warning if an input channel has time gap larger than this (microseconds). + + + + + Flush streams if an input channel has time gap larger than this (microseconds). + + + + + Reset FIR filters if an input channel has time gap larger than this (microseconds). + + + + + Enable backfilling buffer for out-of-order records. + This values defines its capacity in seconds. + + + + + Maximum allowed deviation from the sequence number of oldest packet if + packet with requested sequence number is not found. If seq_gap_limit is + exceeded, data flow starts from the next packet coming in, otherwise + from the oldest packet in buffer. + + + + + Total number of TCP/IP connections allowed. + + + + + Maximum number of TCP/IP connections per IP. + + + + + Maximum speed per connection (0: throttle disabled). + + + + + Path to lockfile to prevent multiple instances. + + + + + Size of memory buffer (number of recent Mini-SEED records kept in RAM). + + + + + Number of disk buffer segments (files under <dir>/station/segments/ + where <dir> is the directory given by the filebase parameter). + + + + + Size of one disk buffer segment in the records (512-byte units). + + + + + Number of blank records to insert after the re-scan of disk buffer + if <dir>/station/buffer.xml is not found (assuming the server + did not terminate correctly). + + + + + Encoding of Mini-SEED records created by SeedLink. The value must be + steim1 or steim2. If omitted, the global encoding parameter is used. + + + + + If configured with a value greater than zero then all sequence files + created by the chain plugin are deleted which are older than X minutes + before Seedlink is started. + + + + + + + + Configures general station parameters irrespective of a configured plugin. + + If stream_check, gap_check_pattern or gap_treshold is changed it is necessary + to remove the files <dir>/*/buffer.xml, where <dir> is the directory + given by the filebase parameter. In this case the disk buffer is re-scanned when + SeedLink is started (which will take some time). + + + + + List of IP addresses or IP/mask pairs (in ipchains/iptables syntax). + Only if a client's IP address + matches one of those the station is shown (slinktool -L, etc.) and + accessible. If omitted, the global access parameter is used. + If the global access parameter is not set any client can access the station. + + + + + Size of memory buffer (number of recent Mini-SEED records kept in RAM). + + + + + Number of disk buffer segments (files under <dir>/station/segments/ + where <dir> is the directory given by the filebase parameter). + + + + + Size of one disk buffer segment in the records (512-byte units). 
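A small back-of-the-envelope helper for the two parameters described above, the number of disk buffer segments and the segment size in 512-byte records; the numbers used below are arbitrary examples, not documented defaults.

def disk_buffer_bytes(segments, segsize_records, record_size=512):
    # Approximate per-station disk buffer usage.
    return segments * segsize_records * record_size

usage = disk_buffer_bytes(segments=10, segsize_records=1000)
print(f"{usage / 1024 / 1024:.1f} MiB per station")  # -> 4.9 MiB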
+ + + + + Number of blank records to insert after the re-scan of disk buffer + if <dir>/station/buffer.xml is not found (assuming the server + did not terminate correctly). + + + + + Encoding of Mini-SEED records created by SeedLink. The value must be + steim1 or steim2. If omitted, the global encoding parameter is used. + + + + + Enable backfilling buffer for out-of-order records when raw samples + are transmitted. This values defines its capacity in seconds. + + + + + Name of the proc object uniquely defined in a seedlink template file, e.g. + $SEISCOMP_ROOT/share/templates/seedlink/streams_{name}.tpl. + proc is used for processing, e.g. renaming or resampling of raw + streams (streams submitted by a plugin as raw samples). + The template file is used to generate + $SEISCOMP_ROOT/var/lib/seedlink/streams.xml . + + Example template: $SEISCOMP_ROOT/share/templates/seedlink/streams_stream100.tpl + + + + + diff --git a/etc/descriptions/seedlink_antelope.xml b/etc/descriptions/seedlink_antelope.xml new file mode 100644 index 0000000..95b76f2 --- /dev/null +++ b/etc/descriptions/seedlink_antelope.xml @@ -0,0 +1,26 @@ + + + + Antelope ORB (TCP/IP) + + + + Regular expression to match ORB packets, default is all + waveform data. + + + + Hostname or IP of the Antelope ORB. + + + Port of the Antelope ORB. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_chain.xml b/etc/descriptions/seedlink_chain.xml new file mode 100644 index 0000000..d95a29a --- /dev/null +++ b/etc/descriptions/seedlink_chain.xml @@ -0,0 +1,150 @@ + + + + seedlink + Global options for the chain plugin + + + + + + Defines the maximum number of concurrent dial-up + connections. A value of 0 does not limit the + concurrent connections. + + + + + If enabled load_timetable is used by the chain plugin to request + the available time windows of each stream in the Seedlink buffer + to discard older records. + + + + + + + + Seedlink server (TCP/IP) + + + + Hostname or IP of the Seedlink server. + + + + Port of the Seedlink server + + + + List of stream selectors. If left empty all available + streams will be requested. See slinktool manpage for + more information. + + + + + Defines the input station if different from the stations + code. Usually leave this parameter unconfigured. If + configured and different from the stations code + the station code will be renamed to match the local station + code. + + + + + Defines the input network if different from the stations + network. Usually leave this parameter unconfigured. If + configured and different from the stations network code + the network code will be renamed to match the local network + code. + + + + + Usually stations with the same source addresses and dialup + options are grouped together and managed by a chain plugin + process. You can give here an additional group ID. Available + placeholders are $NET and $STA that are replaced by the + network code or station code. Whitespaces are replaced by + underscores when generating chain.xml. + + + + + The default timing quality is used when unpacking data that + does not contain blockette 1001, that allows SeedLink to + construct blockette 1001 for downsampled data. + + + + + If overlap removal is set to "initial" or "full", then + earlier records will be discarded (record number is used + in case several subsequent records have the same header time). + All other values are treated as "none". 
Leave this option + unset unless you know what you are doing. + + + + + If enabled then the chain plugin tries to subscribe for + channels with the BATCH command which is magnitudes faster + than the old handshake. Not all SeedLink servers support + the BATCH mode so it needs to be turned off in case of + problems. + + + + + + Enables dialup mode. Needs schedule and uptime to be + set accordingly. + + + + + Dialup uptime in seconds. + + + + + The dialup schedula in cron format. + + + + An optional dialup ifup script. + + + An optional dialup ifdown script. + + + + + + Allows to rename channels. This is a list of + strings where each item defines one mapping in the + format "SRC_LOCCHA:DST_LOCCHA". Either SRC_LOCCHA or + DST_LOCCHA may contain wildcard characters (question + mark) and the location code (LOC) in both identifiers + is optional. + + + + + Allows to unpack channels instead of sending them as + miniseed to Seedlink. Unpacked streams can be downsampled + by Seedlink. This is a list of strings where each item + defines one unpack mapping in the format + "SRC_LOCCHA:DST_ID:1" where DST_ID can + be an arbitrary channel id which must match the definition of + a stream in streams.xml. The last ":1" is optional and defines + if the unpacked data should be doubled (duplicate each sample). + In case rename mappings are active, SRC_LOCCHA referes to + the final channel name after renaming. + + + + + + diff --git a/etc/descriptions/seedlink_dr24.xml b/etc/descriptions/seedlink_dr24.xml new file mode 100644 index 0000000..4aa959a --- /dev/null +++ b/etc/descriptions/seedlink_dr24.xml @@ -0,0 +1,20 @@ + + + + Geotech DR24 (serial plugin) + + + Serial port. + + + Baud rate. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_echopro_3ch100hz.xml b/etc/descriptions/seedlink_echopro_3ch100hz.xml new file mode 100644 index 0000000..5e884cb --- /dev/null +++ b/etc/descriptions/seedlink_echopro_3ch100hz.xml @@ -0,0 +1,14 @@ + + + + Kelunji Echo/EchoPro serial plugin (3 Ch, 200 Hz) + + + Serial Port. + + + Name of stream processing scheme. + + + + diff --git a/etc/descriptions/seedlink_echopro_6ch200hz.xml b/etc/descriptions/seedlink_echopro_6ch200hz.xml new file mode 100644 index 0000000..e8048b3 --- /dev/null +++ b/etc/descriptions/seedlink_echopro_6ch200hz.xml @@ -0,0 +1,14 @@ + + + + Kelunji Echo/EchoPro serial plugin (6 Ch, 100 Hz) + + + Serial Port. + + + Name of stream processing scheme. + + + + diff --git a/etc/descriptions/seedlink_edata.xml b/etc/descriptions/seedlink_edata.xml new file mode 100644 index 0000000..415196f --- /dev/null +++ b/etc/descriptions/seedlink_edata.xml @@ -0,0 +1,20 @@ + + + + EarthData PS6-24 (serial plugin) + + + Serial port. + + + Baud rate. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_ewexport.xml b/etc/descriptions/seedlink_ewexport.xml new file mode 100644 index 0000000..8c703ad --- /dev/null +++ b/etc/descriptions/seedlink_ewexport.xml @@ -0,0 +1,34 @@ + + + + Earthworm export server (TCP/IP) + + + + Hostname or IP of the Earthworm export server. + + + + Port of the Earthworm export server. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + + Text for heartbeat to server. 
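Referring back to the chain plugin's rename mappings described above ("SRC_LOCCHA:DST_LOCCHA" with optional '?' wildcards), the following plain-Python sketch shows how such a mapping could be applied. The matching rules of the actual plugin may differ; the sketch assumes source and destination patterns of equal length.

import fnmatch

def apply_rename(loccha, mappings):
    for mapping in mappings:
        src, dst = mapping.split(":")
        if fnmatch.fnmatch(loccha, src):
            # Fill '?' wildcards in the destination from the matched code.
            return "".join(c if d == "?" else d for c, d in zip(loccha, dst))
    return loccha  # no mapping matched, keep the original code

# hypothetical mapping: move the HH? channels of location 00 to location 10
print(apply_rename("00.HHZ", ["00.HH?:10.HH?"]))  # -> "10.HHZ"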
+ + + + + Rate at which to expect heartbeats from server in seconds. + + + + + + diff --git a/etc/descriptions/seedlink_ewexport_pasv.xml b/etc/descriptions/seedlink_ewexport_pasv.xml new file mode 100644 index 0000000..b571a90 --- /dev/null +++ b/etc/descriptions/seedlink_ewexport_pasv.xml @@ -0,0 +1,34 @@ + + + + Earthworm passive export server (TCP/IP) + + + + Bind IP of the Earthworm passive export server. + + + + Port of the Earthworm passive export server. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + + Text for heartbeat to server. + + + + + Rate at which to expect heartbeats from server in seconds. + + + + + + diff --git a/etc/descriptions/seedlink_fs_mseed.xml b/etc/descriptions/seedlink_fs_mseed.xml new file mode 100644 index 0000000..0cb666a --- /dev/null +++ b/etc/descriptions/seedlink_fs_mseed.xml @@ -0,0 +1,95 @@ + + + + Mini-SEED file plugin + + + + + + + + + + + + + "pattern" is a POSIX extended regular expression that + must match input file names (useful for filtering out non-data + files). For example "BH[NEZ]" would match any files + that contained "BHE", "BHN" or "BHZ". + If no pattern is specified all files will be processed. + + + + + Look for data files at the 1st or 2nd directory level. + + + + + Move file to subdirectory "processed" before starting to read it. + + + + + Delete processed files. + + + + + Look only for files that are newer than the last file processed. + + + + + Timestamp file is used to save the modification time of the last + file processed. + + + + + New files are searched for every "polltime" seconds. + + + + + Wait until the file is at least n seconds old, before trying to + read it. + + + + + Tells how many debugging messages are printed. + + + + + Maximum number of consecutive zeros in datastream before data + gap will be declared (-1 = disabled). + + + + + If timing quality is not available, use this value as default + (-1 = disabled). + + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + Channel definitions (Mini-SEED streams are defined in streams.xml, + look for <proc name="generic_3x50">). + + + + + + + + diff --git a/etc/descriptions/seedlink_gmeteo.xml b/etc/descriptions/seedlink_gmeteo.xml new file mode 100644 index 0000000..edbe04b --- /dev/null +++ b/etc/descriptions/seedlink_gmeteo.xml @@ -0,0 +1,20 @@ + + + + GFZ meteo protocol (serial plugin) + + + Serial port. + + + Baud rate. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_hrd24.xml b/etc/descriptions/seedlink_hrd24.xml new file mode 100644 index 0000000..0a92171 --- /dev/null +++ b/etc/descriptions/seedlink_hrd24.xml @@ -0,0 +1,23 @@ + + + + Nanometrics HRD24 (serial plugin) + + + Serial port. + + + Baud rate. + + + Number of bundles in packet. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_liss.xml b/etc/descriptions/seedlink_liss.xml new file mode 100644 index 0000000..05f9dce --- /dev/null +++ b/etc/descriptions/seedlink_liss.xml @@ -0,0 +1,16 @@ + + + + LISS server (TCP/IP) + + + + Hostname or IP of the LISS server. 
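As an illustration of the file-scanning behaviour described above for the Mini-SEED file plugin (filename pattern, poll interval and minimum file age), a heavily simplified plain-Python polling loop could look like this. The real plugin additionally handles the directory depth, the timestamp file, and moving or deleting processed files.

import os
import re
import time

def poll(directory, pattern=r"BH[NEZ]", polltime=30, delay=5):
    seen = set()
    regex = re.compile(pattern)
    while True:
        now = time.time()
        for entry in os.scandir(directory):
            if not entry.is_file() or entry.path in seen:
                continue
            if not regex.search(entry.name):
                continue  # non-data file, filtered out by the pattern
            if now - entry.stat().st_mtime < delay:
                continue  # file may still be written, wait until it is older
            print("processing", entry.path)
            seen.add(entry.path)
        time.sleep(polltime)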
+ + + + Port of the LISS server + + + + diff --git a/etc/descriptions/seedlink_m24.xml b/etc/descriptions/seedlink_m24.xml new file mode 100644 index 0000000..af3b9ad --- /dev/null +++ b/etc/descriptions/seedlink_m24.xml @@ -0,0 +1,23 @@ + + + + Lennartz M24 + + + Serial port. + + + Baud rate. + + + Time offset in usecs. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_minilogger.xml b/etc/descriptions/seedlink_minilogger.xml new file mode 100644 index 0000000..7e8e092 --- /dev/null +++ b/etc/descriptions/seedlink_minilogger.xml @@ -0,0 +1,65 @@ + + + + SEP064 USB Seismometer Interface + + + + Device path and name of port for USB Seismometer Interface. + If the specified port cannot be opened or is not a USB Seismometer + Interface device, all available ports will be scanned. + + + + + Allow low-level setting of port interface attributes when available + ports are scanned to find a USB Seismometer Interface device, 0=NO, 1=Yes. + Setting 1 (=Yes) may help successful detection and correct reading of the + USB Seismometer Interface device, particularly for the RasberryPi, but can + have adverse effects on other devices, terminals, etc. open on the system. + + + + + Sets a fixed sample rate to report in the miniseed file header. + The default (-1) sets an estimated sample rate based on recent packet start + times. This estimated sample rate will vary slightly over time, potentially + producing errors in some software when reading the miniseed files. + See also: nominal_sample_rate + + + + + SEED data encoding type for writing miniseed files. + Supported values are: INT16, INT32, STEIM1, STEIM2 + + + + + The initial letters to set for the miniseed header 'channel', will be + prepended to the component. + + + + + Component of seismogram, one of Z, N or E. + + + + + Set sample rate and gain on SEP 064 device, 0=NO, 1=Yes. + + + + + Nominal sample rate per second, one of 20, 40 or 80. + + + + + Nominal gain, one of 1, 2 or 4. + + + + + diff --git a/etc/descriptions/seedlink_miscScript.xml b/etc/descriptions/seedlink_miscScript.xml new file mode 100644 index 0000000..10c37f7 --- /dev/null +++ b/etc/descriptions/seedlink_miscScript.xml @@ -0,0 +1,34 @@ + + + + Misceallaneous script + + + Script path. + + + Script arguments. + + + + Name of the proc object used for channel naming (and eventually down sampling). If this field is left to default (auto), a basic proc (without decimation) will be automatically generated. + + + + + Names of the channels. These names are only used if you choose to automatically generate the proc. Nevertheless, even if you are not using the auto proc, it is important to declare correctly this list as the number of channels is used to size the internal plugin's buffers. If you do not want seedlink to use one of the channels, replace it names with "none". Example : 01HHZ,none,none,02HHZ,HHE,HHN + + + + + Sampling frequency. Must be integer or a fraction of integers. + + + + + This parameter aims at enforcing the sending of seedlink data blocks (512bytes) even if they are not yet full. 0 means no enforcing. + + + + + diff --git a/etc/descriptions/seedlink_miscSerial.xml b/etc/descriptions/seedlink_miscSerial.xml new file mode 100644 index 0000000..79b754e --- /dev/null +++ b/etc/descriptions/seedlink_miscSerial.xml @@ -0,0 +1,39 @@ + + + + Miscellaneous ASCII serial Plugin (serial plugin) + + + Serial port. + + + Baud rate. 
+ + + + Name of the proc object used for channel naming (and eventually down sampling). If this field is left to default (auto), a basic proc (without decimation) will be automatically generated. + + + + + Names of the channels. These names are only used if you choose to automatically generate the proc. Nevertheless, even if you are not using the auto proc, it is important to declare correctly this list as the number of channels is used to size the internal plugin's buffers. If you do not want seedlink to use one of the channels, replace it names with "none". Example : 01HHZ,none,none,02HHZ,HHE,HHN + + + + + Sampling frequency. Must be integer or a fraction of integers. + + + + + This parameter aims at enforcing the sending of seedlink data blocks (512bytes, even if they are not yet full. 0 means no enforcing. + + + + + If this parameter is not 0, a date frame ("YYYY,mm,dd hh:mm:ss\n") will be send on serial port every serial_clock_period seconds. + + + + + diff --git a/etc/descriptions/seedlink_mk6.xml b/etc/descriptions/seedlink_mk6.xml new file mode 100644 index 0000000..ee8f2ea --- /dev/null +++ b/etc/descriptions/seedlink_mk6.xml @@ -0,0 +1,14 @@ + + + + MK6 + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_mppt.xml b/etc/descriptions/seedlink_mppt.xml new file mode 100644 index 0000000..1d87382 --- /dev/null +++ b/etc/descriptions/seedlink_mppt.xml @@ -0,0 +1,29 @@ + + + + SunSaver MPPT via Modbus TCP/IP + + + + Hostname or IP. + + + + + TCP port. + + + + + List of channels. + + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_mseedfifo.xml b/etc/descriptions/seedlink_mseedfifo.xml new file mode 100644 index 0000000..6d2c442 --- /dev/null +++ b/etc/descriptions/seedlink_mseedfifo.xml @@ -0,0 +1,31 @@ + + + + seedlink + Global options for the mseedfifo plugin + + + + + + Path to named fifo pipe. Default: + $SEISCOMP_ROOT/var/run/seedlink/mseedfifo for seedlink. + Alias modules automatically replace "seedlink" + by the alias name. + + + + + Do not exit plugin if writer closes the fifo. + + + + + + + + + mseedfifo_plugin + + + diff --git a/etc/descriptions/seedlink_mseedscan.xml b/etc/descriptions/seedlink_mseedscan.xml new file mode 100644 index 0000000..5266999 --- /dev/null +++ b/etc/descriptions/seedlink_mseedscan.xml @@ -0,0 +1,18 @@ + + + + + MseedScan transfers new or changed miniSEED files from a directory + to a SeedLink server. It recursively a directory + structures and continuously checks for file modifications. The files + are presumed to be composed of Mini-SEED records of 512 Bytes. As the + files are appended or created the plugin will feed the new records to + the controlling SeedLink server. + + + + Directory to scan. + + + + diff --git a/etc/descriptions/seedlink_mws.xml b/etc/descriptions/seedlink_mws.xml new file mode 100644 index 0000000..23eab24 --- /dev/null +++ b/etc/descriptions/seedlink_mws.xml @@ -0,0 +1,20 @@ + + + + MWS (serial plugin) + + + Serial port. + + + Baud rate. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). 
+ + + + + diff --git a/etc/descriptions/seedlink_naqs.xml b/etc/descriptions/seedlink_naqs.xml new file mode 100644 index 0000000..6ec7b3f --- /dev/null +++ b/etc/descriptions/seedlink_naqs.xml @@ -0,0 +1,20 @@ + + + + NAQS (TCP/IP) + + + Address of server, required. + + + Specify the NAQS control/data port. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_nmxp.xml b/etc/descriptions/seedlink_nmxp.xml new file mode 100644 index 0000000..a4d1008 --- /dev/null +++ b/etc/descriptions/seedlink_nmxp.xml @@ -0,0 +1,46 @@ + + + + + NAQS (TCP/IP) with nmxptool. nmxptool is not part of SeisComP and needs to be installed + under $INSTALLDIR/share/plugins/seedlink/. + + + + Hostname or IP of the NAQS server. + + + TCP port of the NAQS server. + + + Max latency in seconds (time to wait for missing packets). + + + + Short-Term-Completion (default -1). + + -1 is for Raw Stream, no Short-Term-Completion. + Packets contain compressed data. + + 0 decompressed packets are received in chronological order without + waiting for missing packets. + + [1..300] decompressed packets are received in chronological order + but waiting for missing packets at most SECs seconds. This requires + NaqsServer version greater than 1.90. + + + + + Defines additional options passed to nmxptool. + + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_ps2400_eth.xml b/etc/descriptions/seedlink_ps2400_eth.xml new file mode 100644 index 0000000..a07fbc7 --- /dev/null +++ b/etc/descriptions/seedlink_ps2400_eth.xml @@ -0,0 +1,28 @@ + + + + + The ps2400_eth plug-in fetches data from a Earth Data PS2400/PS6-24 + digitizer (firmware >= 2.23) using TCP/IP. Other configurable parameters + are in $SEISCOMP_ROOT/share/templates/seedlink/ps2400_eth/plugins.ini.tpl. + + + + + IP of the Earth Data PS2400/PS6-24 digitizer. + + + + + Port of the Earth Data PS2400/PS6-24 digitizer. + + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_q330.xml b/etc/descriptions/seedlink_q330.xml new file mode 100644 index 0000000..407a578 --- /dev/null +++ b/etc/descriptions/seedlink_q330.xml @@ -0,0 +1,68 @@ + + + + seedlink + Global options for the Q330 plugin + + + + + + UDP base port used when udp port is defined as "auto" + + + + + + + + Quanterra Q330 (UDP/IP) + + + + Hostname or IP. + + + + + Source port to receive data packets. + + + + + UDP port. + + + + + Name of the proc object uniquely defined in a template file of + the q330 plugin e.g. + $SEISCOMP_ROOT/share/templates/seedlink/q330/streams_{name}.tpl. + proc is used for processing, e.g. renaming or resampling of raw + streams (streams submitted by a plugin as raw samples). + The template file is used to generate + $SEISCOMP_ROOT/var/lib/seedlink/streams.xml . + + Example template: $SEISCOMP_ROOT/share/templates/seedlink/q330/streams_q330_100.tpl + + If proc is not given, the streams are taken as provided by the DAS. + + + + + Q330 dataport number (1-4). + + + + + Q330 serial number (with 0x prefix). + + + + + Q330 auth code (with 0x prefix). 
+ + + + + diff --git a/etc/descriptions/seedlink_reftek.xml b/etc/descriptions/seedlink_reftek.xml new file mode 100644 index 0000000..5acafad --- /dev/null +++ b/etc/descriptions/seedlink_reftek.xml @@ -0,0 +1,66 @@ + + + + RefTek RTPD (TCP/IP) + + + Hostname or IP of the RTPD server. + + + Port of the RTPD server. + + + + Defines an alternative unit mapping file. The default file + is in $INSTALLDIR/var/lib/seedlink/reftek2sl.map. If + this path is not an absolute path it will treated as relative + path to $INSTALLDIR/var/lib/seedlink. The map file is used + if the unit is not explictly given in the binding. Each line + in the file is one mapping from a unit to a station name. The + first column is the unit id e.g. 91F3 and the second column + the Seedlink station name e.g. ABCD. + + + + The unit id. + + + + Name of the proc object uniquely defined in a template file of + the reftek plugin e.g. + $SEISCOMP_ROOT/share/templates/seedlink/reftek/streams_{name}.tpl. + proc is used for processing, e.g. renaming or resampling of raw + streams (streams submitted by a plugin as raw samples). + The template file is used to generate + $SEISCOMP_ROOT/var/lib/seedlink/streams.xml . + + Example template: $SEISCOMP_ROOT/share/templates/seedlink/reftek/streams_reftek.tpl + + + + + Timeout length in seconds. If no data is received from a Reftek + unit during this period, the plugin assumes that the unit is + disconnected. + + + + + Default timing quality in percents. This value will be used when + no timing quality information is available. Can be -1 to omit + the blockette 1001 altogether. + + + + + Timing quality to use when GPS is out of lock. + + + + + Send Reftek state-of-health data as Mini-SEED LOG stream. + + + + + diff --git a/etc/descriptions/seedlink_sadc.xml b/etc/descriptions/seedlink_sadc.xml new file mode 100644 index 0000000..b30d924 --- /dev/null +++ b/etc/descriptions/seedlink_sadc.xml @@ -0,0 +1,23 @@ + + + + SADC10/18/20/30 (serial plugin) + + + Serial port. + + + Baud rate. + + + Use PC time for initial time setting. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_scream.xml b/etc/descriptions/seedlink_scream.xml new file mode 100644 index 0000000..c94e80b --- /dev/null +++ b/etc/descriptions/seedlink_scream.xml @@ -0,0 +1,50 @@ + + + + + SCREAM! server (TCP/UDP/IP) plugin that receives data via + the SCREAM! protocol and sends raw data to Seedlink. The mapping + from SCREAM! channel id's to Seedlink net.sta.loc.cha is defined + in $INSTALLDIR/var/lib/seedlink/config/scream2sl.map if not + specified otherwise (parameter 'map'). + + + + IP of the SCREAM! server. + + + Port of the SCREAM! + + + + Enables TCP/IP mode. By default the data is received via + UDP but TCP is used instead. This needs a propert + configuration of the SCREAM! server to accept TCP + connections. + + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + Defines an alternative stream mapping file. The default file + is in $INSTALLDIR/var/lib/seedlink/scream2sl.map. If + this path is not an absolute path it will treated as relative + path to $INSTALLDIR/var/lib/seedlink. + + + + + Defines the channel mappings (ID:channel, ...) from SCREAM! + to the channel code used in streams.xml. If no channel mappings + are given, the map file defined in map (either default or custom) + is used. 
+ + + + + diff --git a/etc/descriptions/seedlink_scream_ring.xml b/etc/descriptions/seedlink_scream_ring.xml new file mode 100644 index 0000000..756769d --- /dev/null +++ b/etc/descriptions/seedlink_scream_ring.xml @@ -0,0 +1,61 @@ + + + + + SCREAM! server plugin that receives data via + the SCREAM! protocol and sends raw data to Seedlink. The mapping + from SCREAM! channel id's to Seedlink net.sta.loc.cha is defined + in $INSTALLDIR/var/lib/seedlink/config/scream2sl.map if not + specified otherwise (parameter 'map'). This is the second revision + of the scream plugin which supports buffering for short-term + completeness. + + + + IP of the SCREAM! server. + + + Port of the SCREAM! + + + + Enables TCP/IP mode. By default the data is received via + UDP but TCP is used instead. This needs a propert + configuration of the SCREAM! server to accept TCP + connections. + + + + TCP request port of the SCREAM! for backfill requests + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + Size of the stream ringbuffers (small values are useful for RT systems; + large values for decreasing gaps). + + + + + Defines an alternative stream mapping file. The default file + is in $INSTALLDIR/var/lib/seedlink/scream2sl.map. If + this path is not an absolute path it will treated as relative + path to $INSTALLDIR/var/lib/seedlink. + + + + + Defines the channel mappings (ID:channel, ...) from SCREAM! + to the channel code used in streams.xml. If no channel mappings + are given, the map file defined in map (either default or custom) + is used. + + + + + diff --git a/etc/descriptions/seedlink_vaisala.xml b/etc/descriptions/seedlink_vaisala.xml new file mode 100644 index 0000000..c1b33a4 --- /dev/null +++ b/etc/descriptions/seedlink_vaisala.xml @@ -0,0 +1,20 @@ + + + + Vaisala ASCII protocol (serial plugin) + + + Serial port. + + + Baud rate. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_wago.xml b/etc/descriptions/seedlink_wago.xml new file mode 100644 index 0000000..eef0155 --- /dev/null +++ b/etc/descriptions/seedlink_wago.xml @@ -0,0 +1,29 @@ + + + + WAGO Controller (T-Elektronik) + + + + Hostname or IP. + + + + + TCP port. + + + + + List of channels. + + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_wave24.xml b/etc/descriptions/seedlink_wave24.xml new file mode 100644 index 0000000..cfacdef --- /dev/null +++ b/etc/descriptions/seedlink_wave24.xml @@ -0,0 +1,20 @@ + + + + Wave24 + + + Serial port. + + + Baud rate. + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/seedlink_win.xml b/etc/descriptions/seedlink_win.xml new file mode 100644 index 0000000..41207a0 --- /dev/null +++ b/etc/descriptions/seedlink_win.xml @@ -0,0 +1,41 @@ + + + + WIN server + + + + UDP port to receive data packets. + + + + + Defines an alternative stream mapping file. The default file + is in $INSTALLDIR/var/lib/seedlink/win2sl.map. If + this path is not an absolute path it will treated as relative + path to $INSTALLDIR/var/lib/seedlink. + + + + + List of channels (hexID:name, ...). 
The name can either be a single + channel name or a combination of station and channel separated by + a whitespace, e.g. "ABCD BHZ". If no station name is given, + e.g. only BHZ is specified, the name of the station the plugin bound to + is used. The definition of a station name is important if the plugin + configuration is used as a profile and assigned to multiple stations. + In this case a mapping file would be the more appropriate solution. + + If no channel mappings are given, the map file defined in map + (either default or custom) is used. + + + + + Name of the proc object (defined in streams.xml); used for processing + raw streams (streams submitted by a plugin as raw samples). + + + + + diff --git a/etc/descriptions/sh2proc.xml b/etc/descriptions/sh2proc.xml new file mode 100644 index 0000000..2f2ea51 --- /dev/null +++ b/etc/descriptions/sh2proc.xml @@ -0,0 +1,54 @@ + + + + + Convert SeismicHandler event files to SCML. + + + + sh2proc [options] + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#daemon + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + + diff --git a/etc/descriptions/slarchive.xml b/etc/descriptions/slarchive.xml new file mode 100644 index 0000000..5ce4039 --- /dev/null +++ b/etc/descriptions/slarchive.xml @@ -0,0 +1,313 @@ + + + + SeedLink client for data stream archiving + + + + Host of the Seedlink server to connect to. If the acquisition + is running on one system nothing needs to be changed. + + + + + The port of the Seedlink server to connect to. If the acquisition + is running on one system this port must match the configured + local Seedlink port. + + + + + Path to waveform archive where all data is stored. Relative paths + (as the default) are treated relative to the installation + directory ($SEISCOMP_ROOT). + + + + + Number of records (512 byte units) to buffer before flushing to + disk. + + + + + The network reconnect delay (in seconds) for the connection + to the SeedLink server. If the connection breaks for any + reason this will govern how soon a reconnection should be + attempted. The default value is 30 seconds. + + + + + The network timeout (in seconds) for the connection to the + SeedLink server. If no data [or keep alive packets?] are received + in this time range the connection is closed and re-established + (after the reconnect delay has expired). The default value is + 600 seconds. A value of 0 disables the timeout. + + + + + Timeout for closing idle data stream files in seconds. The idle + time of the data streams is only checked when some packets has + arrived. If no packets arrived no idle stream files will be + closed. There is no reason to change this parameter except for + the unusual cases where the process is running against an open + file number limit. Default is 300 seconds. + + + + + Interval (in seconds) at which keepalive (heartbeat) packets + are sent to the server. Keepalive packets are only sent if + nothing is received within the interval. This requires a + Seedlink version >= 3. 
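Note: the channel list accepted by the win plugin above uses entries of the form hexID:name, where name is either a bare channel code (e.g. BHZ) or a station and channel separated by whitespace (e.g. "ABCD BHZ"); without a station part, the station the plugin is bound to is used. A minimal parsing sketch follows, assuming comma-separated entries; the function name and the example ids are illustrative only, not the plugin's actual implementation.

def parse_win_channels(spec, bound_station):
    """Return a dict mapping hex channel ids to (station, channel) tuples."""
    channels = {}
    for entry in spec.split(","):
        entry = entry.strip()
        if not entry:
            continue
        hex_id, _, name = entry.partition(":")
        parts = name.split()
        if not parts:
            continue
        if len(parts) == 2:
            # "STATION CHANNEL" form, e.g. "ABCD BHZ"
            station, channel = parts
        else:
            # Bare channel code: fall back to the station the plugin is bound to.
            station, channel = bound_station, parts[0]
        channels[hex_id.strip()] = (station, channel)
    return channels

# parse_win_channels("100A:BHZ, 100B:ABCD BHN", "WXYZ")
#   -> {"100A": ("WXYZ", "BHZ"), "100B": ("ABCD", "BHN")}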
+ + + + + + Path to cerificate store where all certificates and CRLs are stored. Relative + paths(as the default) are treated relative to the installation + directory ($SEISCOMP_ROOT). + If the signature check is enabled slarchive loads all files at start. The store + uses the OpenSSl store format. From the offical OpenSSL documentation: + "The directory should contain one certificate or CRL per file in PEM format, + with a file name of the form hash.N for a certificate, or hash.rN for a CRL. + The .N or .rN suffix is a sequence number that starts at zero, and is incremented + consecutively for each certificate or CRL with the same hash value. Gaps in the + sequence numbers are not supported, it is assumed that there are no more objects + with the same hash beyond the first missing number in the sequence.The .N or .rN suffix + is a sequence number that starts at zero, and is incremented consecutively for + each certificate or CRL with the same hash value. Gaps in the sequence numbers + are not supported, it is assumed that there are no more objects with the same + hash beyond the first missing number in the sequence." + The hash value can be obtained as follows: + + openssl x509 -hash -noout -in <file> + + + + + Signatures are expected to be carried in blockette 2000 + as opaque data. Modes: + + ignore : Signatures will be ignored and no further actions + will be taken. + warning: Signatures will be checked and all received records + which do not carry a valid signature or no signature + at all will be logged with at warning level. + skip : All received records without a valid signature + will be ignored and will not be processed. + + + + + + + slarchive [OPTION]... [host][:][port] + + + Address ([host][:][port]) is a required argument. It specifies the address + of the SeedLink server in host:port format. Either the host, port or both + can be omitted. If host is omitted then localhost is assumed, + i.e. ':18000' implies 'localhost:18000'. If the port is omitted + then 18000 is assumed, i.e. 'localhost' implies 'localhost:18000'. + If only ':' is specified 'localhost:18000' is assumed. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Configures what streams of a station should be archived and for + how long. + + + + + List of stream selectors. If left empty all available + streams will be requested. See slarchive manpage for + more information. + + + + + Number of days the data is kept in the archive. This + requires purge_datafile to be run as cronjob. + + + + + diff --git a/etc/descriptions/slinktool.xml b/etc/descriptions/slinktool.xml new file mode 100644 index 0000000..87ec016 --- /dev/null +++ b/etc/descriptions/slinktool.xml @@ -0,0 +1,200 @@ + + + + SeedLink query interface module + + + slinktool [OPTION]... [host][:][port] + + + Address ([host][:][port]) is a required argument. It specifies the address + of the SeedLink server in host:port format. Either the host, port or both + can be omitted. If host is omitted then localhost is assumed, + i.e. ':18000' implies 'localhost:18000'. If the port is omitted, + then 18000 is assumed, i.e. 'localhost' implies 'localhost:18000'. + If only ':' is specified, 'localhost:18000' is assumed. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/etc/descriptions/slmon.xml b/etc/descriptions/slmon.xml new file mode 100644 index 0000000..e087a7a --- /dev/null +++ b/etc/descriptions/slmon.xml @@ -0,0 +1,74 @@ + + + + SeedLink monitor creating web pages + + + + Title of the web page used as heading. 
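Note: both slarchive and slinktool above take the server address in [host][:][port] form with localhost and 18000 as defaults, so ':18000', 'localhost' and ':' all resolve to 'localhost:18000'. A minimal sketch of that resolution logic; the helper name and the example hostnames are illustrative only.

def resolve_address(arg, default_host="localhost", default_port=18000):
    """Return (host, port) for an address given as [host][:][port]."""
    host, _, port = arg.partition(":")
    if not host:
        host = default_host
    # Missing port falls back to the SeedLink default of 18000.
    port = int(port) if port else default_port
    return host, port

# resolve_address(":18000")          -> ("localhost", 18000)
# resolve_address("localhost")       -> ("localhost", 18000)
# resolve_address(":")               -> ("localhost", 18000)
# resolve_address("myserver:18001")  -> ("myserver", 18001)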
+ + + + + Refresh interval of the generated web page used by the browser. + The interval should be similar to the interval for starting slmon. + If empty, the web page must be manually refreshed from within + the browser. + + + + + Host of the Seedlink server to connect to. If the acquisition + is running on one system nothing needs to be changed. + + + + + The port of the Seedlink server to connect to. If the acquisition + is running on one system this port must match the configured + local Seedlink port. + + + + + e-mail address added to web pages. + + + + + Output directory of the web pages. + + + + + Favicon URL of the web pages. Not mandatory. + + + + + Name of Link shown in footer of web pages. + + + + + URL referred to by linkname in footer of web pages. + + + + + URL to live seismograms. %s will be replaced by station name. + Not mandatory. + + + + + + + + + Defines the group of the station that is monitored. + + + + + diff --git a/etc/global.cfg b/etc/global.cfg new file mode 100644 index 0000000..aaed987 --- /dev/null +++ b/etc/global.cfg @@ -0,0 +1,24 @@ +# Agency ID used to set creationInfo.agencyID in data model objects. Should not +# contain spaces. +agencyID = gempa + +# Datacenter ID which is primarily used by Arclink and its tools. Should not +# contain spaces. +datacenterID = test_may + +# Organization name used mainly by ArcLink and SeedLink. +organization = ctbto_training + +# SeisComP applications access waveform data through the RecordStream +# interface. Read the SeisComP documentation for a list of supported services +# and their configuration. +# This parameter configures RecordStream source URL. +# Format: [service://]location[#type] +# "service": The name of the RecordStream implementation. If not given, +# "file://" is implied. +# IMPORTANT: This parameter replaces former RecordStream configurations in +# "recordstream.service" and "recordstream.source" which have been removed. +recordstream = combined://slink/localhost:18000;sdsarchive//home/data/archive + +# Define a list of core modules loaded at startup. 
+core.plugins = dbmysql diff --git a/etc/init/access.py b/etc/init/access.py new file mode 100644 index 0000000..81e2211 --- /dev/null +++ b/etc/init/access.py @@ -0,0 +1,474 @@ +from __future__ import print_function +import os +import string +import time +import re +import glob +import shutil +import sys +import imp +import random +import fnmatch +import seiscomp.core +import seiscomp.config +import seiscomp.kernel +import seiscomp.system +import seiscomp.client +import seiscomp.datamodel + + +DEBUG = 0 + + +def parseBindPort(bind): + bindToks = bind.split(':') + if len(bindToks) == 1: + return int(bindToks[0]) + elif len(bindToks) == 2: + return int(bindToks[1]) + else: + return -1 + + +def collectParams(container): + params = {} + + for i in range(container.groupCount()): + params.update(collectParams(container.group(i))) + + for i in range(container.structureCount()): + params.update(collectParams(container.structure(i))) + + for i in range(container.parameterCount()): + p = container.parameter(i) + + if p.symbol.stage == seiscomp.system.Environment.CS_UNDEFINED: + continue + + params[p.variableName] = ",".join(p.symbol.values) + + return params + +def logd(message): + ''' + Debugging method + ''' + if DEBUG: + print(message, file=sys.stderr) + sys.stderr.flush() + +def log(message): + ''' + Helper method for outputting with flushing + ''' + print(message, file=sys.stdout) + sys.stdout.flush() + +class InventoryResolver(object): + def __init__(self, inventory): + self._inventory = inventory + pass + + ''' + Those should be internal methods only + ''' + def _overlaps(self, pstart, pend, cstart, cend): + if cstart is None and cend is None: return True + + if cstart is None: + cstart = seiscomp.core.Time() + + if pend is not None: + if pend > cstart: + if cend is None or pstart < cend: + return True + else: + if cend is None or pstart < cend: + return True + + return False + + def _getEnd(self, obj): + try: + return obj.end() + except ValueError: + return None + + def _codeMatch(self, obj, code): + if not code: return True + if fnmatch.fnmatch(str(obj.code()).upper(), code.strip().upper()): return True + return False + + def _collect(self, objs, count, code, start, end): + items = [] + + for i in range(0, count): + obj = objs(i) + + # Check code + if not self._codeMatch(obj, code): continue + + # Check time + if not self._overlaps(obj.start(), self._getEnd(obj), start, end): continue + + items.append(obj) + + return items + + def _findStreams(self, location, code, start, end): + items = self._collect(location.stream, location.streamCount(), code, start, end) + if len(items) == 0: + raise Exception("Location %s / %s does not have a stream named: %s in the time range %s / %s " % (location.code(), location.start(), code, start, end)) + return items + + def _findLocations(self, station, code, start, end): + items = self._collect(station.sensorLocation, station.sensorLocationCount(), code, start, end) + if len(items) == 0: + raise Exception("Station %s / %s does not have a location named: %s in the time range %s / %s " % (station.code(), station.start(), code, start, end)) + return items + + def _findStations(self, network, code, start, end): + items = self._collect(network.station, network.stationCount(), code, start, end) + if len(items) == 0: + raise Exception("Network %s / %s does not have a station named: %s in the time range %s / %s " % (network.code(), network.start(), code, start, end)) + return items + + def _findNetworks(self, code, start, end): + items = 
self._collect(self._inventory.network, self._inventory.networkCount(), code, start, end) + if len(items) == 0: + raise Exception("Inventory does not have a network named: %s in the time range %s / %s " % (code, start, end)) + return items + + def _truncateDate(self, obj, currentDate): + if currentDate < obj.start(): + return obj.start() + end = self._getEnd(obj) + if end and currentDate > end: + return end + return currentDate + + ''' + Public methods that should be used + ''' + def findStartDate(self, network, start, end): + if start is None: + return network.start() + return self._truncateDate(network, start) + + def findEndDate(self, network, start, end): + if end is None: + try: return network.end() + except ValueError: return None + + return self._truncateDate(network, end) + + def expandStream(self, stations, streams, start, end): + items = [] + + for strm in streams.split(','): + (locationCode, streamCode) = ('.' + strm).split('.')[-2:] + for station in stations: + try: + for location in self._findLocations(station, locationCode, start, end): + if locationCode: + currentLocCode = location.code() + else: + currentLocCode = "" + try: + for stream in self._findStreams(location, streamCode, start, end): + try: + items.index((currentLocCode, stream.code())) + except: + items.append((currentLocCode, stream.code())) + except Exception as e: + pass + except Exception as e: + pass + + return items + + def expandNetworkStation(self, ncode, scode, start, end): + items = [] + + for network in self._findNetworks(ncode, start, end): + + try: + stations = self._findStations(network, scode, start, end) + except Exception as e: + logd(str(e)) + continue + + # Append + items.append((network, stations)) + + if len(items) == 0: + raise Exception("Cannot find suitable %s network with station code %s ranging from %s / %s" % (ncode, scode, start, end)) + return items + +class AccessUpdater(seiscomp.client.Application): + def __init__(self, argc, argv): + seiscomp.client.Application.__init__(self, argc, argv) + self.setLoggingToStdErr(True) + self.setMessagingEnabled(True) + self.setDatabaseEnabled(True, True) + self.setAutoApplyNotifierEnabled(False) + self.setInterpretNotifierEnabled(False) + self.setMessagingUsername("_sccfgupd_") + ##self.setLoadConfigModuleEnabled(True) + # Load all configuration modules + ##self.setConfigModuleName("") + self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP) + + def send(self, *args): + ''' + A simple wrapper that sends a message and tries to resend it in case of + an error. + ''' + while not self.connection().send(*args): + log("sending failed, retrying") + time.sleep(1) + + def sendNotifiers(self, group): + Nsize = seiscomp.datamodel.Notifier.Size() + + if Nsize > 0: + logd("trying to apply %d changes..." 
% Nsize) + else: + logd("no changes to apply") + return + + Nmsg = seiscomp.datamodel.Notifier.GetMessage(True) + + it = Nmsg.iter() + msg = seiscomp.datamodel.NotifierMessage() + + maxmsg = 100 + sent = 0 + mcount = 0 + + try: + try: + while it.get(): + msg.attach(seiscomp.datamodel.Notifier.Cast(it.get())) + mcount += 1 + if msg and mcount == maxmsg: + sent += mcount + logd("sending message (%5.1f %%)" % (sent / float(Nsize) * 100.0)) + self.send(group, msg) + msg.clear() + mcount = 0 + #self.sync("_sccfgupd_") + + it.next() + except: + pass + finally: + if msg.size(): + logd("sending message (%5.1f %%)" % 100.0) + self.send(group, msg) + msg.clear() + #self.sync("_sccfgupd_") + + def run(self): + ''' + Reimplements the main loop of the application. This methods collects + all bindings and updates the database. It searches for already existing + objects and updates them or creates new objects. Objects that is didn't + touched are removed. This tool is the only one that should writes the + configuration into the database and thus manages the content. + ''' + # Initialize the basic directories + filebase = seiscomp.system.Environment.Instance().installDir() + descdir = os.path.join(filebase, "etc", "descriptions") + keydir = os.path.join(filebase, "etc", "key", self.name()) + + # Load definitions of the configuration schema + defs = seiscomp.system.SchemaDefinitions() + if defs.load(descdir) == False: + log("could not read descriptions") + return False + + if defs.moduleCount() == 0: + log("no modules defined, nothing to do") + return False + + # Create a model from the schema and read its configuration including + # all bindings. + model = seiscomp.system.Model() + model.create(defs) + model.readConfig() + + mod_access = model.module("access") + + existingAccess = {} + + routing = self.query().loadRouting() + inventory = self.query().loadInventory() + iResolver = InventoryResolver(inventory) + + seiscomp.datamodel.Notifier.Enable() + seiscomp.datamodel.Notifier.SetCheckEnabled(False) + + # Update access on basis of access module + if mod_access: + logd("Working on access bindings") + for staid in mod_access.bindings.keys(): + binding = mod_access.getBinding(staid) + if not binding: continue + + params = {} + for i in range(binding.sectionCount()): + params.update(collectParams(binding.section(i))) + + access_users = params.get('access.users') + access_start = params.get('access.start') + access_end = params.get('access.end') + access_netonly = params.get('access.disableStationCode') + access_streams = params.get('access.streams') + + if access_netonly is None or access_netonly == "false": + access_netonly = False + else: + access_netonly = True + + if not access_users: continue + + networkCode = staid.networkCode + stationCode = staid.stationCode + + if access_start: + access_start = seiscomp.core.Time.FromString(access_start, "%Y-%m-%d %H:%M:%S") + + if access_end: + access_end = seiscomp.core.Time.FromString(access_end, "%Y-%m-%d %H:%M:%S") + + if access_netonly: + stationCode = "" + + ## Resolve Inventory + try: + networkList = iResolver.expandNetworkStation(networkCode, stationCode, access_start, access_end) + except Exception as e: + #log("Access issue, cannot find network object for %s %s::\n\t %s" % (staid.networkCode, staid.stationCode, str(e))) + for user in access_users.split(','): + existingAccess[(networkCode, "", "", "", user, "1980-01-01 00:00:00")] = (None,) + continue + + ## Generate routes for each network found + for (network, stations) in networkList: + + ## Resolve 
start date / end date of routing to be generated + aStart = iResolver.findStartDate(network, access_start, access_end) + aEnd = iResolver.findEndDate(network, access_start, access_end) + + if not access_streams: + for user in access_users.split(','): + existingAccess[(networkCode, stationCode, "", "", user, aStart.toString("%Y-%m-%d %H:%M:%S"))] = (aEnd,) + continue + + ## Add the route or routes for this net + for (locationCode, streamCode) in iResolver.expandStream(stations, access_streams, access_start, access_end): + for user in access_users.split(','): + existingAccess[(networkCode, stationCode, locationCode, streamCode, user, aStart.toString("%Y-%m-%d %H:%M:%S"))] = (aEnd,) + + + for ((networkCode, stationCode, locationCode, streamCode, user, start), (end,)) in existingAccess.items(): + access = routing.access(seiscomp.datamodel.AccessIndex(networkCode, stationCode, locationCode, streamCode, user, seiscomp.core.Time.FromString(start, "%Y-%m-%d %H:%M:%S"))) + if not access: + access = seiscomp.datamodel.Access() + access.setNetworkCode(networkCode) + access.setStationCode(stationCode) + access.setLocationCode(locationCode) + access.setStreamCode(streamCode) + access.setUser(user) + access.setStart(seiscomp.core.Time.FromString(start, "%Y-%m-%d %H:%M:%S")) + access.setEnd(end) + routing.add(access) + else: + update = False + try: + cend = access.end() + if (not end) or (end and cend != end): + access.setEnd(end) + update = True + except ValueError as e: + if end: + access.setEnd(end) + update = True + + if update: + access.update() + + + i = 0 + while i < routing.accessCount(): + access = routing.access(i) + if (access.networkCode(), access.stationCode(), access.locationCode(), access.streamCode(), access.user(), access.start().toString("%Y-%m-%d %H:%M:%S")) not in existingAccess: + routing.remove(access) + continue + + i += 1 + + self.sendNotifiers("ROUTING") + return True + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def start(self): + return 0 + + def updateConfig(self): + messaging = True + messagingPort = 18180 + messagingProtocol = 'scmp'; + + try: messaging = self.env.getBool("messaging.enable") + except: pass + + # If messaging is disabled in kernel.cfg, do not do anything + if not messaging: + log("- messaging disabled, nothing to do") + return 0 + + # Load scmaster configuration and figure the bind ports of scmaster out + cfg = seiscomp.config.Config() + seiscomp.system.Environment.Instance().initConfig(cfg, "scmaster") + + # First check the unencrypted port and prefer that + p = parseBindPort(cfg.getString("interface.bind")) + if p > 0: + messagingPort = p + + try: + bind = self.env.getString("messaging.bind") + bindToks = bind.split(':') + if len(bindToks) == 1: + messagingPort = int(bindToks[0]) + elif len(bindToks) == 2: + messagingPort = int(bindToks[1]) + else: + sys.stdout.write( + "E invalid messaging bind parameter: %s\n" % bind) + sys.stdout.write(" expected either 'port' or 'ip:port'\n") + return 1 + except: + pass + + # Otherwise check if ssl is enabled + else: + p = parseBindPort(cfg.getString("interface.ssl.bind")) + if p > 0: + messagingPort = p + messagingProtocol = 'scmps' + + # Synchronize database configuration + params = [self.name, '--console', '1', '-H', + '%s://localhost:%d' % (messagingProtocol, messagingPort)] + # Create the database update app and run it + # This app implements a seiscomp.client.Application and connects + # to localhost regardless of 
connections specified in global.cfg to + # prevent updating a remote installation by accident. + app = AccessUpdater(len(params), params) + return app() diff --git a/etc/init/diskmon.py b/etc/init/diskmon.py new file mode 100644 index 0000000..9f6cc9d --- /dev/null +++ b/etc/init/diskmon.py @@ -0,0 +1,61 @@ +from __future__ import print_function +import os, sys, subprocess as sub +import seiscomp.kernel, seiscomp.config + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def start(self): + cfg = seiscomp.config.Config() + cfg.readConfig(os.path.join(self.env.SEISCOMP_ROOT, "etc", "defaults", self.name + ".cfg")) + cfg.readConfig(os.path.join(self.env.SEISCOMP_ROOT, "etc", self.name + ".cfg")) + try: cfg.readConfig(os.path.join(os.environ['HOME'], ".seiscomp", self.name + ".cfg")) + except: pass + + run_dir = os.path.join(self.env.SEISCOMP_ROOT, "var", "run", self.name) + try: os.makedirs(run_dir) + except: pass + + # Set defaults + threshold = 95 + emails = [] + + try: threshold = cfg.getInt("threshold") + except: pass + try: emails = cfg.getStrings("emails") + except: pass + + if len(emails) == 0: + sys.stderr.write("%s: warning: nothing to do, no email addresses configured\n" % self.name) + return 0 + + cmd = 'df | awk -v max="%d" \'{ if ( $5 > max ) print $0 }\'' % threshold + p = sub.Popen(['sh', '-c', cmd], stdout=sub.PIPE) + msg = p.stdout.read() + + statfile = os.path.join(run_dir, "msg_sent") + + if msg.find('\n') < 0: + # Nothing to do + try: os.remove(statfile) + except: print("ERROR: could not remove stat file %s" % statfile) + return 1 + + # Message already sent? + if os.path.exists(statfile): + return 0 + + try: hostname = os.uname()[1] + except: hostname = 'unknown host' + + msg = "The following disks at %s are nearly full:\n\n" % hostname + msg + try: open(statfile, "w") + except: print("ERROR: could not create stat file in %s" % statfile) + + os.system('echo "%s" | mail -s "disk nearly full" %s' % (msg, " ".join(emails))) + return 0 + + + def stop(self): + return True diff --git a/etc/init/ew2sc.py b/etc/init/ew2sc.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/ew2sc.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/fdsnws.py b/etc/init/fdsnws.py new file mode 100644 index 0000000..389713e --- /dev/null +++ b/etc/init/fdsnws.py @@ -0,0 +1,57 @@ +import os +import subprocess +import time + +import seiscomp.kernel + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def supportsAliases(self): + # The default handler does not support aliases + return True + + def reload(self): + if not self.isRunning(): + self.env.log('{} is not running'.format(self.name)) + return 1 + + self.env.log('reloading {}'.format(self.name)) + + lockfile = self.env.lockFile(self.name) + reloadfile = os.path.join(os.path.dirname(lockfile), + '{}.reload'.format(self.name)) + + # Open pid file + with open(lockfile, "r") as f: + # Try to read the pid + pid 
= int(f.readline()) + + # touch reload file + open(reloadfile, 'a').close() + + if not os.path.isfile(reloadfile): + self.env.log('could not touch reload file: {}' \ + .format(reloadfile)) + return 1 + + # Send SIGHUP + subprocess.call("kill -s HUP %d" % pid, shell=True) + + # wait for reload file to disappear + for _ in range(0, int(self.reloadTimeout * 5)): + time.sleep(0.2) + if not os.path.isfile(reloadfile): + return 0 + + self.env.log('timeout exceeded') + + return 1 + +# Uncomment for authbind (running service on privileged ports) +# def _run(self): +# params = "--depth 2 " + self.env.binaryFile(self.name) + " " + self._get_start_params() +# binaryPath = "authbind" +# return self.env.start(self.name, binaryPath, params) diff --git a/etc/init/kernel.py b/etc/init/kernel.py new file mode 100644 index 0000000..a9ba023 --- /dev/null +++ b/etc/init/kernel.py @@ -0,0 +1,26 @@ +import os, sys +import seiscomp.config, seiscomp.kernel + +class Module(seiscomp.kernel.CoreModule): + def __init__(self, env): + seiscomp.kernel.CoreModule.__init__( + self, env, env.moduleName(__file__)) + # High priority + self.order = -100 + # This is a config module which writes the setup config to kernel.cfg + self.isConfigModule = True + + def setup(self, setup_config): + cfgfile = os.path.join(self.env.SEISCOMP_ROOT, "etc", self.name + ".cfg") + + cfg = seiscomp.config.Config() + cfg.readConfig(cfgfile) + try: + cfg.setString("organization", setup_config.getString( + "kernel.global.organization")) + except: + cfg.remove("organization") + + cfg.writeConfig() + + return 0 diff --git a/etc/init/ql2sc.py b/etc/init/ql2sc.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/ql2sc.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scalert.py b/etc/init/scalert.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scalert.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scamp.auto b/etc/init/scamp.auto new file mode 100644 index 0000000..e69de29 diff --git a/etc/init/scamp.py b/etc/init/scamp.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scamp.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scautoloc.auto 
b/etc/init/scautoloc.auto new file mode 100644 index 0000000..e69de29 diff --git a/etc/init/scautoloc.py b/etc/init/scautoloc.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scautoloc.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scautopick.auto b/etc/init/scautopick.auto new file mode 100644 index 0000000..e69de29 diff --git a/etc/init/scautopick.py b/etc/init/scautopick.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scautopick.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scdb.py b/etc/init/scdb.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scdb.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scevent.auto b/etc/init/scevent.auto new file mode 100644 index 0000000..e69de29 diff --git a/etc/init/scevent.py b/etc/init/scevent.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scevent.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scevtlog.auto b/etc/init/scevtlog.auto new file mode 100644 index 0000000..e69de29 diff --git a/etc/init/scevtlog.py b/etc/init/scevtlog.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scevtlog.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scimex.py b/etc/init/scimex.py new file mode 100644 index 
0000000..e09cfe2 --- /dev/null +++ b/etc/init/scimex.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scimport.py b/etc/init/scimport.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scimport.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scinv.py b/etc/init/scinv.py new file mode 100644 index 0000000..89c2064 --- /dev/null +++ b/etc/init/scinv.py @@ -0,0 +1,80 @@ +import os +import sys +import seiscomp.kernel +import seiscomp.config +import seiscomp.system + + +def parseBindPort(bind): + bindToks = bind.split(':') + if len(bindToks) == 1: + return int(bindToks[0]) + elif len(bindToks) == 2: + return int(bindToks[1]) + else: + return -1 + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, "inventory") + # This is a config module which synchronizes bindings with the database + self.isConfigModule = True + # Give this module a high priority to be executed at first (unless + # another module defines a negative value. It ensures that successive + # modules can read an up-to-date inventory and use the latest rc files. 
+ self.order = 0 + + def updateConfig(self): + messaging = True + messagingPort = 18180 + messagingProtocol = 'scmp' + + try: + messaging = self.env.getBool("messaging.enable") + except: + pass + + # If messaging is disabled in kernel.cfg, do not do anything + if not messaging: + sys.stdout.write("- messaging disabled, nothing to do\n") + return 0 + + # Load scmaster configuration and figure the bind ports of scmaster out + cfg = seiscomp.config.Config() + seiscomp.system.Environment.Instance().initConfig(cfg, "scmaster") + + # First check the unencrypted port and prefer that + p = parseBindPort(cfg.getString("interface.bind")) + if p > 0: + messagingPort = p + + try: + bind = self.env.getString("messaging.bind") + bindToks = bind.split(':') + if len(bindToks) == 1: + messagingPort = int(bindToks[0]) + elif len(bindToks) == 2: + messagingPort = int(bindToks[1]) + else: + sys.stdout.write( + "E invalid messaging bind parameter: %s\n" % bind) + sys.stdout.write(" expected either 'port' or 'ip:port'\n") + return 1 + except: + pass + + # Otherwise check if ssl is enabled + else: + p = parseBindPort(cfg.getString("interface.ssl.bind")) + if p > 0: + messagingPort = p + messagingProtocol = 'scmps' + + # Synchronize inventory + return os.system("scinv sync --console=1 -H %s://localhost:%d/production --filebase \"%s\" --rc-dir \"%s\" --key-dir \"%s\"" + % (messagingProtocol, messagingPort, + os.path.join(self.env.root, "etc", "inventory"), + os.path.join(self.env.root, "var", "lib", "rc"), + os.path.join(self.env.root, "etc", "key"))) + diff --git a/etc/init/scm.py b/etc/init/scm.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scm.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scmag.auto b/etc/init/scmag.auto new file mode 100644 index 0000000..e69de29 diff --git a/etc/init/scmag.py b/etc/init/scmag.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scmag.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scmaster.py b/etc/init/scmaster.py new file mode 100644 index 0000000..2b9b80d --- /dev/null +++ b/etc/init/scmaster.py @@ -0,0 +1,634 @@ +from __future__ import print_function + +import os +import shutil +import sys +import subprocess +import tempfile +from seiscomp import config, kernel, system + +# Python version depended string conversion +if sys.version_info[0] < 3: + py3bstr = str + py3ustr = str + +else: + py3bstr = lambda s: s.encode('utf-8') + py3ustr = lambda s: s.decode('utf-8', 'replace') + + +class DBParams: + def __init__(self): + self.db = None + self.rwuser = None + self.rwpwd = None + self.rouser = None + self.ropwd = None + self.rohost = None + 
self.rwhost = None + self.drop = False + self.create = False + + +def check_output(cmd): + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, shell=True) + out = proc.communicate() + return [py3ustr(out[0]), py3ustr(out[1]), proc.returncode] + + +def addEntry(cfg, param, item): + # Adds an item to a parameter list + try: + items = cfg.getStrings(param) + except ValueError: + items = config.VectorStr() + + if item not in items: + items.push_back(item) + cfg.setStrings(param, items) + + +def removeEntry(cfg, param, item): + # Removes an items from a parameter list + try: + items = cfg.getStrings(param) + for i in range(items.size()): + if items[i] == item: + items.erase(items.begin() + i) + cfg.setStrings(param, items) + break + except ValueError: + # No parameter set, nothing to do + pass + + +# The kernel module which starts scmaster if enabled +class Module(kernel.CoreModule): + def __init__(self, env): + kernel.CoreModule.__init__( + self, env, env.moduleName(__file__)) + # High priority + self.order = -1 + + # Default values + self.messaging = True + self.messagingBind = None + + try: + self.messaging = self.env.getBool("messaging.enable") + except ValueError: + pass + try: + self.messagingBind = self.env.getString("messaging.bind") + except ValueError: + pass + + # Add master port + def _get_start_params(self): + if self.messagingBind: + return kernel.Module._get_start_params(self) + \ + " --bind %s" % self.messagingBind + + return kernel.Module._get_start_params(self) + + def start(self): + if not self.messaging: + print("[kernel] {} is disabled by config".format(self.name), + file=sys.stderr) + return 1 + + appConfig = system.Environment.Instance().appConfigFileName(self.name) + localConfig = system.Environment.Instance().configFileName(self.name) + lockFile = os.path.join(self.env.SEISCOMP_ROOT, self.env.lockFile(self.name)) + try: + needRestart = False + started = os.path.getmtime(lockFile) + try: + needRestart = started < os.path.getmtime(appConfig) + except Exception: + pass + try: + needRestart = started < os.path.getmtime(localConfig) + except Exception: + pass + + if needRestart: + self.stop() + except Exception: + pass + + return kernel.CoreModule.start(self) + + def check(self): + if not self.messaging: + print("[kernel] {} is disabled by config".format(self.name), + file=sys.stderr) + return 0 + + return kernel.CoreModule.check(self) + + def status(self, shouldRun): + if not self.messaging: + shouldRun = False + return kernel.CoreModule.status(self, shouldRun) + + def readDBParams(self, params, setup_config): + try: + params.db = setup_config.getString(self.name + + ".database.enable.backend.db") + except ValueError as err: + print(err) + print(" - database name not set, ignoring setup", + file=sys.stderr) + return False + + try: + params.rwhost = setup_config.getString( + self.name + ".database.enable.backend.rwhost") + except ValueError: + print(" - database host (rw) not set, ignoring setup", + file=sys.stderr) + return False + + try: + params.rwuser = setup_config.getString( + self.name + ".database.enable.backend.rwuser") + except ValueError: + print(" - database user (rw) not set, ignoring setup", + file=sys.stderr) + return False + + try: + params.rwpwd = setup_config.getString( + self.name + ".database.enable.backend.rwpwd") + except ValueError: + print(" - database password (rw) not set, ignoring setup", + file=sys.stderr) + return False + + try: + params.rohost = setup_config.getString( + self.name + ".database.enable.backend.rohost") + 
except ValueError: + print(" - database host (ro) not set, ignoring setup", + file=sys.stderr) + return False + + try: + params.rouser = setup_config.getString( + self.name + ".database.enable.backend.rouser") + except ValueError: + print(" - database user (ro) not set, ignoring setup", + file=sys.stderr) + return False + + try: + params.ropwd = setup_config.getString( + self.name + ".database.enable.backend.ropwd") + except ValueError: + print(" - database password (ro) not set, ignoring setup", + file=sys.stderr) + return False + + try: + params.create = setup_config.getBool( + self.name + ".database.enable.backend.create") + except ValueError: + params.create = False + + try: + params.drop = setup_config.getBool( + self.name + ".database.enable.backend.create.drop") + except ValueError: + params.drop = False + + return True + + def setup(self, setup_config): + schemapath = os.path.join(self.env.SEISCOMP_ROOT, "share", "db") + + cfg = config.Config() + system.Environment.Instance().initConfig(cfg, self.name) + + try: + dbenable = setup_config.getBool(self.name + ".database.enable") + except ValueError: + print(" - database.enable not set, ignoring setup", + file=sys.stderr) + return 0 + + dbBackend = None + + if not dbenable: + removeEntry(cfg, "queues.production.plugins", "dbstore") + removeEntry( + cfg, "queues.production.processors.messages", "dbstore") + cfg.remove("queues.production.processors.messages.dbstore.driver") + cfg.remove("queues.production.processors.messages.dbstore.read") + cfg.remove("queues.production.processors.messages.dbstore.write") + else: + try: + dbBackend = setup_config.getString( + self.name + ".database.enable.backend") + except ValueError: + print(" - database backend not set, ignoring setup", + file=sys.stderr) + return 1 + + if dbBackend == "mysql/mariadb": + dbBackend = "mysql" + try: + rootpwd = setup_config.getString( + self.name + ".database.enable.backend.create.rootpw") + except ValueError: + rootpwd = "" + + try: + runAsSuperUser = setup_config.getBool( + self.name + ".database.enable.backend.create.runAsSuperUser") + except ValueError: + runAsSuperUser = False + + params = DBParams() + if not self.readDBParams(params, setup_config): + return 1 + + cfg.setString("queues.production.processors.messages.dbstore.read", + "{}:{}@{}/{}" + .format(params.rouser, params.ropwd, params.rohost, params.db)) + cfg.setString("queues.production.processors.messages.dbstore.write", + "{}:{}@{}/{}" + .format(params.rwuser, params.rwpwd, params.rwhost, params.db)) + + if params.create: + dbScript = os.path.join(schemapath, "mysql_setup.py") + options = [ + params.db, + params.rwuser, + params.rwpwd, + params.rouser, + params.ropwd, + params.rwhost, + rootpwd, + str(params.drop), + schemapath + ] + + binary = os.path.join(schemapath, "pkexec_wrapper.sh") + print("+ Running MySQL database setup script {}" + .format(dbScript), file=sys.stderr) + if runAsSuperUser: + cmd = "{} seiscomp-python {} {}".format(binary, dbScript, " ".join(options)) + else: + cmd = "{} {}".format(dbScript, " ".join(options)) + + p = subprocess.Popen(cmd, shell=True) + ret = p.wait() + if ret != 0: + print(" - Failed to setup database", file=sys.stderr) + return 1 + + elif dbBackend == "postgresql": + dbBackend = "postgresql" + + params = DBParams() + if not self.readDBParams(params, setup_config): + return 1 + + cfg.setString("queues.production.processors.messages.dbstore.read", + "{}:{}@{}/{}" + .format(params.rouser, params.ropwd, + params.rohost, params.db)) + 
cfg.setString("queues.production.processors.messages.dbstore.write", + "{}:{}@{}/{}" + .format(params.rwuser, params.rwpwd, + params.rwhost, params.db)) + + if params.create: + try: + tmpPath = tempfile.mkdtemp() + os.chmod(tmpPath, 0o755) + tmpPath = os.path.join(tmpPath, "setup") + try: + shutil.copytree(schemapath, tmpPath) + filename = os.path.join(self.env.SEISCOMP_ROOT, + "bin", "seiscomp-python") + shutil.copy(filename, tmpPath) + except Exception as err: + print(err) + return 1 + + dbScript = os.path.join(tmpPath, "postgres_setup.py") + options = [ + params.db, + params.rwuser, + params.rwpwd, + params.rouser, + params.ropwd, + params.rwhost, + str(params.drop), + tmpPath + ] + + binary = os.path.join(schemapath, "pkexec_wrapper.sh") + print("+ Running PostgreSQL database setup script {}" + .format(dbScript), file=sys.stderr) + cmd = "{} su postgres -c \"{}/seiscomp-python {} {}\"" \ + .format(binary, tmpPath, dbScript, " ".join(options)) + + p = subprocess.Popen(cmd, shell=True) + ret = p.wait() + if ret != 0: + print(" - Failed to setup database", + file=sys.stderr) + return 1 + finally: + try: + shutil.rmtree(tmpPath) + except OSError: + pass + + elif dbBackend == "sqlite3": + dbBackend = "sqlite3" + dbScript = os.path.join(schemapath, "sqlite3_setup.py") + + try: + create = setup_config.getBool( + self.name + ".database.enable.backend.create") + except BaseException: + create = False + + try: + filename = setup_config.getString( + self.name + ".database.enable.backend.filename") + filename = system.Environment.Instance().absolutePath(filename) + except BaseException: + filename = os.path.join(self.env.SEISCOMP_ROOT, "var", + "lib", "seiscomp.db") + + if not filename: + print(" - location not set, ignoring setup", + file=sys.stderr) + return 1 + + try: + override = setup_config.getBool( + self.name + ".database.enable.backend.create.override") + except BaseException: + override = False + + options = [ + filename, + schemapath + ] + + if create: + print("+ Running SQLite3 database setup script {}" + .format(dbScript), file=sys.stderr) + cmd = "seiscomp-python {} {} {}".format(dbScript, " ".join(options), override) + p = subprocess.Popen(cmd, shell=True) + ret = p.wait() + if ret != 0: + print(" - Failed to setup database", file=sys.stderr) + return 1 + + cfg.setString("queues.production.processors.messages.dbstore.read", + filename) + cfg.setString("queues.production.processors.messages.dbstore.write", + filename) + + # Configure db backend for scmaster + cfg.setString("core.plugins", "db" + dbBackend) + cfg.setString( + "queues.production.processors.messages.dbstore.driver", + dbBackend) + + addEntry(cfg, "queues.production.plugins", "dbstore") + addEntry(cfg, "queues.production.processors.messages", "dbstore") + + cfg.writeConfig( + system.Environment.Instance().configFileLocation( + self.name, system.Environment.CS_CONFIG_APP)) + + # Now we need to insert the corresponding plugin to etc/global.cfg + # that all connected local clients can handle the database backend + if dbBackend: + cfgfile = os.path.join(self.env.SEISCOMP_ROOT, "etc", "global.cfg") + cfg = config.Config() + cfg.readConfig(cfgfile) + cfg.setString("core.plugins", "db" + dbBackend) + cfg.writeConfig() + + return 0 + + def updateConfig(self): + cfg = config.Config() + system.Environment.Instance().initConfig(cfg, self.name) + + try: + queues = cfg.getStrings("queues") + except ValueError: + queues = [] + + # iterate through all queues and check DB schema version if message + # processor dbstore is present + for 
queue in queues: + print("INFO: Checking queue '{}'".format(queue), file=sys.stderr) + try: + msgProcs = cfg.getStrings("queues.{}.processors.messages" + .format(queue)) + if "dbstore" in msgProcs and not self.checkDBStore(cfg, queue): + return 1 + except ValueError: + print(" * ignoring - no database backend configured", + file=sys.stderr) + + return 0 + + def checkDBStore(self, cfg, queue): + prefix = "queues.{}.processors.messages.dbstore".format(queue) + + print(" * checking DB schema version", file=sys.stderr) + + try: + backend = cfg.getString("{}.driver".format(prefix)) + except ValueError: + print("WARNING: dbstore message processor activated but no " + "database backend configured", file=sys.stderr) + return True + + if backend not in ("mysql", "postgresql"): + print("WARNING: Only MySQL and PostgreSQL migrations are " + "supported right now. Please check and upgrade the " + "database schema version yourselves.", file=sys.stderr) + return True + + print(" * check database write access ... ", end='', file=sys.stderr) + + # 1. Parse connection + try: + params = cfg.getString("{}.write".format(prefix)) + except ValueError: + print("failed", file=sys.stderr) + print("WARNING: dbstore message processor activated but no " + "write connection configured", file=sys.stderr) + return True + + user = 'sysop' + pwd = 'sysop' + host = 'localhost' + db = 'seiscomp' + port = None + + tmp = params.split('@') + if len(tmp) > 1: + params = tmp[1] + + tmp = tmp[0].split(':') + if len(tmp) == 1: + user = tmp[0] + pwd = None + elif len(tmp) == 2: + user = tmp[0] + pwd = tmp[1] + else: + print("failed", file=sys.stderr) + print("WARNING: Invalid scmaster.cfg:{}.write, cannot check " + "schema version".format(prefix), file=sys.stderr) + return True + + tmp = params.split('/') + if len(tmp) > 1: + tmpHost = tmp[0] + db = tmp[1] + else: + tmpHost = tmp[0] + + # get host name and port + tmp = tmpHost.split(':') + host = tmp[0] + if len(tmp) == 2: + try: + port = int(tmp[1]) + except ValueError: + print("ERROR: Invalid port number {}".format(tmp[1]), + file=sys.stderr) + return True + + db = db.split('?')[0] + + # 2. 
Try to login + if backend == "mysql": + cmd = "mysql -u \"%s\" -h \"%s\" -D\"%s\" --skip-column-names" % ( + user, host, db) + if port: + cmd += " -P %d" % (port) + if pwd: + cmd += " -p\"%s\"" % pwd.replace('$', '\\$') + cmd += " -e \"SELECT value from Meta where name='Schema-Version'\"" + else: + if pwd: + os.environ['PGPASSWORD'] = pwd + cmd = "psql -U \"%s\" -h \"%s\" -t \"%s\"" % (user, host, db) + if port: + cmd += " -p %d" % (port) + cmd += " -c \"SELECT value from Meta where name='Schema-Version'\"" + + out = check_output(cmd) + if out[2] != 0: + print("failed", file=sys.stderr) + print("WARNING: {} returned with error:".format(backend), + file=sys.stderr) + print(out[1].strip(), file=sys.stderr) + return False + + print("passed", file=sys.stderr) + + version = out[0].strip() + print(" * database schema version is {}".format(version), + file=sys.stderr) + + try: + vmaj, vmin = [int(t) for t in version.split('.')] + except ValueError: + print("WARNING: wrong version format: expected MAJOR.MINOR", + file=sys.stderr) + return True + + strictVersionMatch = True + try: + strictVersionMatch = cfg.getBool("{}.strictVersionMatch" + .format(prefix)) + except ValueError: + pass + + if not strictVersionMatch: + print(" * database version check is disabled", file=sys.stderr) + return True + + migrations = os.path.join(self.env.SEISCOMP_ROOT, "share", "db", + "migrations", backend) + migration_paths = {} + + vcurrmaj = 0 + vcurrmin = 0 + + for f in os.listdir(migrations): + if os.path.isfile(os.path.join(migrations, f)): + name, ext = os.path.splitext(f) + if ext != '.sql': + continue + try: + vfrom, vto = name.split('_to_') + except ValueError: + continue + + try: + vfrommaj, vfrommin = [int(t) for t in vfrom.split('_')] + except ValueError: + continue + + try: + vtomaj, vtomin = [int(t) for t in vto.split('_')] + except ValueError: + continue + + migration_paths[(vfrommaj, vfrommin)] = (vtomaj, vtomin) + + if (vtomaj > vcurrmaj) or ((vtomaj == vcurrmaj) and (vtomin > vcurrmin)): + vcurrmaj = vtomaj + vcurrmin = vtomin + + print(" * last migration version is %d.%d" % (vcurrmaj, vcurrmin), + file=sys.stderr) + + if vcurrmaj == vmaj and vcurrmin == vmin: + print(" * schema up-to-date", file=sys.stderr) + return True + + if (vmaj, vmin) not in migration_paths: + print(" * no migrations found", file=sys.stderr) + return True + + print(" * migration to the current version is required. 
Apply the " + "following", file=sys.stderr) + print(" database migration scripts in exactly the given order:", + file=sys.stderr) + while (vmaj, vmin) in migration_paths: + (vtomaj, vtomin) = migration_paths[(vmaj, vmin)] + fname = "%d_%d_to_%d_%d.sql" % (vmaj, vmin, vtomaj, vtomin) + if backend == "mysql": + print(" * mysql -u {} -h {} -p {} < {}" + .format(user, host, db, os.path.join(migrations, fname)), + file=sys.stderr) + elif backend == "postgresql": + print(" * psql -U {} -h {} -d {} -W -f {}" + .format(user, host, db, os.path.join(migrations, fname)), + file=sys.stderr) + else: + print(" * {}".format(os.path.join(migrations, fname)), + file=sys.stderr) + (vmaj, vmin) = (vtomaj, vtomin) + + return False diff --git a/etc/init/scproclat.py b/etc/init/scproclat.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scproclat.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scqc.auto b/etc/init/scqc.auto new file mode 100644 index 0000000..e69de29 diff --git a/etc/init/scqc.py b/etc/init/scqc.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scqc.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/screloc.py b/etc/init/screloc.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/screloc.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scsohlog.py b/etc/init/scsohlog.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scsohlog.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scvoice.auto b/etc/init/scvoice.auto new file mode 100644 index 0000000..e69de29 diff --git a/etc/init/scvoice.py b/etc/init/scvoice.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scvoice.py @@ -0,0 +1,19 @@ +import seiscomp.kernel 
+import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scwfas.py b/etc/init/scwfas.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scwfas.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/scwfparam.py b/etc/init/scwfparam.py new file mode 100644 index 0000000..e09cfe2 --- /dev/null +++ b/etc/init/scwfparam.py @@ -0,0 +1,19 @@ +import seiscomp.kernel +import sys + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def updateConfigProxy(self): + return "trunk" + + def updateConfig(self): + # By default the "trunk" module must be configured to write the + # bindings into the database + return 0 + + def supportsAliases(self): + # The default handler does not support aliases + return True diff --git a/etc/init/seedlink.auto b/etc/init/seedlink.auto new file mode 100644 index 0000000..e69de29 diff --git a/etc/init/seedlink.py b/etc/init/seedlink.py new file mode 100644 index 0000000..df8a326 --- /dev/null +++ b/etc/init/seedlink.py @@ -0,0 +1,739 @@ +from __future__ import print_function +import os, string, time, re, glob, shutil, sys, imp, resource +import seiscomp.kernel, seiscomp.config + +try: + import seiscomp.system + hasSystem = True +except: + hasSystem = False + +try: + import seiscomp.datamodel + import seiscomp.io + dbAvailable = True +except: + dbAvailable = False + + +''' +NOTE: +The plugin to be used for a station of configured with: +plugin = [type] +All plugin specific parameters are stored in plugin.[type].*. + +All parameters from seedlink.cfg are not prefixed with "seedlink.". +Local parameters that are created from seedlink.cfg parameters are +prefixed with "seedlink.". + +NOTE2: Support a database connection to get station descriptions. +''' + +def _loadDatabase(dbUrl): + """ + Load inventory from a database, but only down to the station level. 
+    """
+    m = re.match("(?P<dbDriverName>^.*):\/\/(?P<dbAddress>.+?:.+?@.+?\/.+$)", dbUrl)
+    if not m:
+        raise Exception("error in parsing SC3 DB URL")
+    db = m.groupdict()
+    try:
+        registry = seiscomp.system.PluginRegistry.Instance()
+        registry.addPluginName("db" + db["dbDriverName"])
+        registry.loadPlugins()
+    except Exception as e:
+        raise(e) ### "Cannot load database driver: %s" % e)
+    dbDriver = seiscomp.io.DatabaseInterface.Create(db["dbDriverName"])
+    if dbDriver is None:
+        raise Exception("Cannot find database driver " + db["dbDriverName"])
+    if not dbDriver.connect(db["dbAddress"]):
+        raise Exception("Cannot connect to database at " + db["dbAddress"])
+    dbQuery = seiscomp.datamodel.DatabaseQuery(dbDriver)
+    if dbQuery is None:
+        raise Exception("Cannot get DB query object")
+    print(" Loading inventory from database ... ", file=sys.stderr)
+    inventory = seiscomp.datamodel.Inventory()
+    dbQuery.loadNetworks(inventory)
+    for ni in range(inventory.networkCount()):
+        dbQuery.loadStations(inventory.network(ni))
+    print("Done.", file=sys.stderr)
+    return inventory
+
+
+def _loadStationDescriptions(inv):
+    """From an inventory, prepare a dictionary of station code descriptions.
+
+    In theory, we should only use stations with current time windows.
+
+    """
+    d = dict()
+
+    for ni in range(inv.networkCount()):
+        n = inv.network(ni)
+        net = n.code()
+        if net not in d:
+            d[net] = {}
+
+        for si in range(n.stationCount()):
+            s = n.station(si)
+            sta = s.code()
+            d[net][sta] = s.description()
+
+            try:
+                end = s.end()
+            except: # ValueException ???
+                end = None
+            #print "Found in inventory:", net, sta, end, s.description()
+    return d
+
+class TemplateModule(seiscomp.kernel.Module):
+    def __init__(self, env):
+        seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__))
+
+        self.pkgroot = self.env.SEISCOMP_ROOT
+
+        cfg = seiscomp.config.Config()
+
+        # Defaults Global + App Cfg
+        cfg.readConfig(os.path.join(self.pkgroot, "etc", "defaults", "global.cfg"))
+        cfg.readConfig(os.path.join(self.pkgroot, "etc", "defaults", self.name + ".cfg"))
+
+        # Config Global + App Cfg
+        cfg.readConfig(os.path.join(self.pkgroot, "etc", "global.cfg"))
+        cfg.readConfig(os.path.join(self.pkgroot, "etc", self.name + ".cfg"))
+
+        # User Global + App Cfg
+        cfg.readConfig(os.path.join(os.environ['HOME'], ".seiscomp", "global.cfg"))
+        cfg.readConfig(os.path.join(os.environ['HOME'], ".seiscomp", self.name + ".cfg"))
+
+        self.global_params = dict([(x, ",".join(cfg.getStrings(x))) for x in cfg.names()])
+        self.station_params = dict()
+        self.plugin_dir = os.path.join(self.pkgroot, "share", "plugins", "seedlink")
+        self.template_dir = os.path.join(self.pkgroot, "share", "templates", "seedlink")
+        self.alt_template_dir = "" #os.path.join(self.env.home
+        self.config_dir = os.path.join(self.pkgroot, "var", "lib", self.name)
+
+        self.database_str = ""
+        if "inventory_connection" in self.global_params:
+            #WRONG self.database_str = cfg.getStrings("seedlink.readConnection")
+            self.database_str = self.global_params["inventory_connection"]
+            #self.database_str = cfg.getStrings("seedlink.database.type")+cfg.getStrings("seedlink.database.parameters")
+
+        self.seedlink_station_descr = dict()
+        self.rc_dir = os.path.join(self.pkgroot, "var", "lib", "rc")
+        self.run_dir = os.path.join(self.pkgroot, "var", "run", self.name)
+        self.bindings_dir = os.path.join(self.pkgroot, "etc", "key")
+        self.key_dir = os.path.join(self.bindings_dir, self.name)
+        self.net = None
+        self.sta = None
+
+    def _read_station_config(self, cfg_file):
+        cfg = seiscomp.config.Config()
cfg.readConfig(os.path.join(self.key_dir, cfg_file)) + self.station_params = dict([(x, ",".join(cfg.getStrings(x))) for x in cfg.names()]) + #self.station_params_ex = dict(filter(lambda s: s[1].find("$") != -1, [(x, ",".join(cfg.getStrings(x))) for x in cfg.names()])) + + def _process_template(self, tpl_file, source=None, station_scope=True, print_error=True): + tpl_paths = [] + + if source: + tpl_paths.append(os.path.join(self.alt_template_dir, source)) + tpl_paths.append(os.path.join(self.template_dir, source)) + + tpl_paths.append(self.alt_template_dir) + tpl_paths.append(self.template_dir) + + params = self.global_params.copy() + #params_ex = self.global_params_ex.copy() + + if station_scope: + params.update(self.station_params) + #params_ex.update(self.station_params_ex) + + params['pkgroot'] = self.pkgroot + + #for (p,v) in params_ex.items(): + # try: + # t2 = seiscomp.kernel.Template(v) + # params[p] = t2.substitute(params) + # + # except (KeyError, ValueError): + # pass + + return self.env.processTemplate(tpl_file, tpl_paths, params, print_error) + + def param(self, name, station_scope=True, print_warning=False): + if station_scope: + try: + return self.station_params[name] + + except KeyError: + pass + else: + try: + return self.global_params[name] + + except KeyError: + pass + + if print_warning: + if station_scope: + print("warning: parameter '%s' is not defined for station %s %s" % (name, self.net, self.sta)) + else: + print("warning: parameter '%s' is not defined at global scope" % (name,)) + + raise KeyError + + def setParam(self, name, value, station_scope=True): + self._set(name, value, station_scope) + + def _get(self, name, station_scope=True): + try: return self.param(name, station_scope) + except KeyError: return "" + + def _set(self, name, value, station_scope=True): + if station_scope: + self.station_params[name] = value + + else: + self.global_params[name] = value + +class Module(TemplateModule): + def __init__(self, env): + TemplateModule.__init__(self, env) + # Set kill timeout to 5 minutes + self.killTimeout = 300 + + def _run(self): + if self.env.syslog: + daemon_opt = '-D ' + else: + daemon_opt = '' + + daemon_opt += "-v -f " + os.path.join(self.config_dir, "seedlink.ini") + + try: + lim = resource.getrlimit(resource.RLIMIT_NOFILE) + resource.setrlimit(resource.RLIMIT_NOFILE, (lim[1], lim[1])) + + lim = resource.getrlimit(resource.RLIMIT_NOFILE) + print(" maximum number of open files set to", lim[0], file=sys.stderr) + + except Exception as e: + print(" failed to raise the maximum number open files:", str(e), file=sys.stderr) + + if "sequence_file_cleanup" in self.global_params: + try: + max_minutes = int(self.global_params["sequence_file_cleanup"]) + if max_minutes > 0: + files = glob.glob(os.path.join(self.run_dir, "*.seq")) + for f in files: + if (time.time()-os.path.getmtime(f))/60 >= max_minutes: + print(" removing sequence file %s" % f, file=sys.stderr) + os.remove(f) + else: + print(" sequence_file_cleanup disabled", file=sys.stderr) + + except ValueError: + print(" sequence_file_cleanup parameter is not a number: '%s'" % str(self.global_params["sequence_file_cleanup"]), file=sys.stderr) + return 1 + + return self.env.start(self.name, self.env.binaryFile(self.name), daemon_opt,\ + not self.env.syslog) + + def _getPluginHandler(self, source_type): + try: + return self.plugins[source_type] + except KeyError: + path = os.path.join(self.template_dir, source_type, "setup.py") + try: f = open(path, 'r') + except: return None + + modname = 
'__seiscomp_seedlink_plugins_' + source_type + if modname in sys.modules: + mod = sys.modules[modname] + else: + # create a module + mod = imp.new_module(modname) + mod.__file__ = path + + # store it in sys.modules + sys.modules[modname] = mod + + # our namespace is the module dictionary + namespace = mod.__dict__ + + # test whether this has been done already + if not hasattr(mod, 'SeedlinkPluginHandler'): + code = f.read() + # compile and exec dynamic code in the module + exec(compile(code, '', 'exec'), namespace) + + mod = namespace.get('SeedlinkPluginHandler') + handler = mod() + self.plugins[source_type] = handler + return handler + + def _generateStationForIni(self): + ini = 'station %s description = "%s"\n' % \ + (self._get('seedlink.station.id'), self._get('seedlink.station.description')) + ini += ' name = "%s"\n' % self._get('seedlink.station.code') + ini += ' network = "%s"\n' % self._get('seedlink.station.network') + if self._get('seedlink.station.access'): + ini += ' access = "%s"\n' % self._get('seedlink.station.access').replace(',',' ') + if self._get('seedlink.station.blanks'): + ini += ' blanks = "%s"\n' % self._get('seedlink.station.blanks') + if self._get('seedlink.station.encoding'): + ini += ' encoding = "%s"\n' % self._get('seedlink.station.encoding') + if self._get('seedlink.station.buffers'): + ini += ' buffers = "%s"\n' % self._get('seedlink.station.buffers') + if self._get('seedlink.station.segments'): + ini += ' segments = "%s"\n' % self._get('seedlink.station.segments') + if self._get('seedlink.station.segsize'): + ini += ' segsize = "%s"\n' % self._get('seedlink.station.segsize') + if self._get('seedlink.station.backfill_buffer'): + ini += ' backfill_buffer = "%s"\n' % self._get('seedlink.station.backfill_buffer') + if self._get('seedlink.station.sproc'): + ini += ' proc = "%s"\n' % self._get('seedlink.station.sproc') + ini += '\n' + return ini + + def __process_station(self, profile): + if profile: + self.station_config_file = "profile_%s" % (profile,) + else: + self.station_config_file = "station_%s_%s" % (self.net, self.sta) + + self._read_station_config(self.station_config_file) + + # Generate plugin independent parameters + self._set('seedlink.station.id', self.net + '.' + self.sta) + self._set('seedlink.station.code', self.sta) + self._set('seedlink.station.network', self.net) + self._set('seedlink.station.access', self._get('access')) + self._set('seedlink.station.blanks', self._get('blanks')) + self._set('seedlink.station.encoding', self._get('encoding')) + self._set('seedlink.station.buffers', self._get('buffers')) + self._set('seedlink.station.segments', self._get('segments')) + self._set('seedlink.station.segsize', self._get('segsize')) + self._set('seedlink.station.backfill_buffer', self._get('backfill_buffer')) + self._set('seedlink.station.sproc', self._get('proc')) + + # Supply station description: + # 1. try getting station description from a database + # 2. read station description from seiscomp/var/lib/rc/station_NET_STA + # 3. if not set, use the station code + + description = "" + + if len(self.seedlink_station_descr) > 0: + try: + description = self.seedlink_station_descr[self.net][self.sta] + except KeyError: + pass + + if len(description) == 0: + try: + rc = seiscomp.config.Config() + rc.readConfig(os.path.join(self.rc_dir, "station_%s_%s" % (self.net, self.sta))) + description = rc.getString("description") + except Exception as e: + # Maybe the rc file doesn't exist, maybe there's no readable description. 
+ pass + + if len(description) == 0: + description = self.sta + + self._set('seedlink.station.description', description) + + self.station_count += 1 + + if self._last_net != self.net: + print("+ network %s" % self.net) + self._last_net = self.net + + print(" + station %s %s" % (self.sta, description)) + + # If real-time simulation is activated do not parse the sources + # and force the usage of the mseedfifo_plugin + if self.msrtsimul: + self._set('seedlink.station.sproc', '') + self.seedlink_station[(self.net, self.sta)] = self._generateStationForIni() + self._getPluginHandler('mseedfifo') + return + + for source_type in self._get('sources').split(','): + if not source_type: continue + + source_alias = source_type + toks = source_type.split(':') + if len(toks) > 2: + print("Error: invalid source identifier '%s', expected '[alias:]type'") + continue + elif len(toks) == 2: + source_alias = toks[0] + source_type = toks[1] + + # Plugins are outsourced to external handlers + # that can be added with new plugins. + # This requires a handler file: + # share/templates/seedlink/$type/setup.py + pluginHandler = self._getPluginHandler(source_type) + if pluginHandler is None: + print("Error: no handler for plugin %s defined" % source_type) + continue + + stat = source_type + if source_alias != source_type: + stat += " as " + source_alias + + print(" + source %s" % stat) + + # Backup original binding parameters + station_params = self.station_params.copy() + #station_params_ex = self.station_params_ex.copy() + + # Modify parameter set. Remove alias definition with type string + if source_type != source_alias: + tmp_dict = {} + for x in self.station_params.keys(): + if x.startswith('sources.%s.' % source_type): continue + if x.startswith('sources.%s.' % source_alias): + toks = x.split('.') + toks[1] = source_type + tmp_dict[".".join(toks)] = self.station_params[x] + else: + tmp_dict[x] = self.station_params[x] + self.station_params = tmp_dict + + #tmp_dict = {} + #for x in self.station_params_ex.keys(): + # if x.startswith('sources.%s.' % source_type): continue + # if x.startswith('sources.%s.' 
% source_alias):
+            #        toks = x.split('.')
+            #        toks[1] = source_type
+            #        tmp_dict[".".join(toks)] = self.station_params_ex[x]
+            #    else:
+            #        tmp_dict[x] = self.station_params_ex[x]
+            #self.station_params_ex = tmp_dict
+
+            # Create source entry that ends up in seedlink.ini as plugin
+            try:
+                source_dict = self.seedlink_source[source_type]
+
+            except KeyError:
+                source_dict = {}
+                self.seedlink_source[source_type] = source_dict
+
+            source_key = pluginHandler.push(self)
+            if source_key is None:
+                source_key = source_type
+            else:
+                source_key = (source_type, source_key)
+
+            if source_key not in source_dict:
+                source_id = source_type + str(len(source_dict))
+
+            else:
+                (source_type, source_id) = source_dict[source_key][:2]
+
+            # Update internal parameters usable by a template
+            self._set('seedlink.source.type', source_type)
+            self._set('seedlink.source.id', source_id)
+            source_dict[source_key] = (source_type, source_id, self.global_params.copy(), self.station_params.copy())
+
+            # Create procs for this type for streams.xml
+            sproc_name = self._get('sources.%s.proc' % (source_type))
+            if sproc_name:
+                self.sproc_used = True
+                sproc_filename = "streams_%s.tpl" % sproc_name.split(':')[0]
+                sproc = self._process_template(sproc_filename, source_type, True, False)
+                if sproc:
+                    self.sproc[sproc_name] = sproc
+                else:
+                    print("WARNING: cannot find %s" % sproc_filename)
+
+            # Read plugins.ini template for this source and store content
+            # under the provided key for this binding
+            plugin_ini = self._process_template("plugins.ini.tpl", source_type, True, False)
+            if plugin_ini:
+                self.plugins_ini[source_key] = plugin_ini
+
+            templates = self._get('sources.%s.templates' % (source_type))
+            if templates:
+                for t in templates.split(','):
+                    self.templates.add((t, source_type, 0))
+
+            # Allow plugin handler to override station id
+            station_params['seedlink.station.id'] = self.station_params['seedlink.station.id']
+
+            # Set original parameters
+            self.station_params = station_params
+
+        # Add station procs
+        sproc_name = self._get('proc')
+        if sproc_name:
+            self.sproc_used = True
+            sproc_filename = "streams_%s.tpl" % sproc_name
+            sproc = self._process_template(sproc_filename, None, True, False)
+            if sproc:
+                self.sproc[sproc_name] = sproc
+            else:
+                print("WARNING: cannot find %s" % sproc_filename)
+
+        # Create station section for seedlink.ini
+        self.seedlink_station[(self.net, self.sta)] = self._generateStationForIni()
+
+    def __load_stations(self):
+        self.seedlink_source = {}
+        self.seedlink_station = {}
+        self.plugins_ini = {}
+        self.sproc = {}
+        self.plugins = {}
+        self.sproc_used = False
+        self.station_count = 0
+
+        if self.env.syslog:
+            self._set('seedlink._daemon_opt', ' -D', False)
+        else:
+            self._set('seedlink._daemon_opt', '', False)
+
+        self._set('seedlink.plugin_dir', self.plugin_dir, False)
+        self._set('seedlink.config_dir', self.config_dir, False)
+        self._set('seedlink.run_dir', self.run_dir, False)
+        self._set('seedlink.filters', os.path.join(self.config_dir, "filters.fir"), False)
+        self._set('seedlink.streams', os.path.join(self.config_dir, "streams.xml"), False)
+
+        self.templates = set()
+        self.templates.add(("backup_seqfiles", None, 0o755))
+
+        rx_binding = re.compile(r'(?P<module>[A-Za-z0-9_\.-]+)(:(?P<profile>[A-Za-z0-9_-]+))?$')
+
+        files = glob.glob(os.path.join(self.bindings_dir, "station_*"))
+        files.sort()
+        self._last_net = ""
+
+        for f in files:
+            try:
+                (path, net, sta) = f.split('_')[-3:]
+                if not path.endswith("station"):
+                    print("invalid path", f)
+
+            except ValueError:
+                print("invalid path", f)
continue + + self.net = net + self.sta = sta + + fd = open(f) + line = fd.readline() + while line: + line = line.strip() + if not line or line[0] == '#': + line = fd.readline() + continue + + m = rx_binding.match(line) + if not m: + print("invalid binding in %s: %s" % (f, line)) + line = fd.readline() + continue + + if m.group('module') != self.name: + line = fd.readline() + continue + + profile = m.group('profile') + self.__process_station(profile) + break + + fd.close() + + def _set_default(self, name, value, station_scope = True): + try: self.param(name, station_scope) + except: self._set(name, value, station_scope) + + def supportsAliases(self): + return True + + def requiresKernelModules(self): + return False + + def updateConfig(self): + # Set default values + try: self._set_default("organization", self.env.getString("organization"), False) + except: pass + + self._set_default("lockfile", os.path.join("@ROOTDIR@", self.env.lockFile(self.name)), False) + self._set_default("filebase", os.path.join("@ROOTDIR@", "var", "lib", self.name, "buffer"), False) + self._set_default("port", "18000", False) + self._set_default("encoding", "steim2", False) + self._set_default("trusted", "127.0.0.0/8", False) + self._set_default("access", "0.0.0.0/0", False) + self._set_default("stream_check", "true", False) + self._set_default("window_extraction", "true", False) + self._set_default("window_extraction_trusted", "true", False) + self._set_default("websocket", "false", False) + self._set_default("websocket_trusted", "false", False) + + self._set_default("buffers", "100", False) + self._set_default("segments", "50", False) + self._set_default("segsize", "1000", False) + + self._set_default("gap_check_pattern", "", False) + self._set_default("gap_treshold", "", False) + + self._set_default("info", "streams", False) + self._set_default("info_trusted", "all", False) + self._set_default("request_log", "true", False) + self._set_default("proc_gap_warn", "10", False) + self._set_default("proc_gap_flush", "100000", False) + self._set_default("proc_gap_reset", "1000000", False) + self._set_default("backfill_buffer", "0", False) + self._set_default("seq_gap_limit", "100000", False) + self._set_default("connections", "500", False) + self._set_default("connections_per_ip", "20", False) + self._set_default("bytespersec", "0", False) + + # This seedlink version expectes composed station ids: net.sta + self._set("composed_station_id", "true", False) + + ## Expand the @Variables@ + if hasSystem: + e = seiscomp.system.Environment.Instance() + self.setParam("filebase", e.absolutePath(self.param("filebase", False)), False) + self.setParam("lockfile", e.absolutePath(self.param("lockfile", False)), False) + else: + self.setParam("filebase", self.param("filebase", False), False) + self.setParam("lockfile", self.param("lockfile", False), False) + + if self._get("msrtsimul", False).lower() == "true": + self.msrtsimul = True + else: + self.msrtsimul = False + + # Load custom stream processor definitions + custom_procs = self._process_template("streams_custom.tpl", None, True, False) + if custom_procs: self.sproc[""] = sproc + + # Load descriptions from inventory: + if self.database_str: + if dbAvailable == True: + print(" Loading station descriptions from %s" % self.database_str, file=sys.stderr) + inv = _loadDatabase(self.database_str) + self.seedlink_station_descr = _loadStationDescriptions(inv) + else: + print(" Database configured but trunk is not installed", file=sys.stderr) + self.seedlink_station_descr = dict() + + 
try: os.makedirs(self.config_dir)
+        except: pass
+
+        try: os.makedirs(self.run_dir)
+        except: pass
+
+        self.__load_stations()
+
+        for p in self.plugins.values():
+            p.flush(self)
+
+        if self.msrtsimul:
+            self.seedlink_source['mseedfifo'] = {1:('mseedfifo',1,self.global_params.copy(),{})}
+
+        if self._get("stream_check", False).lower() == "true":
+            self._set("stream_check", "enabled", False)
+        else:
+            self._set("stream_check", "disabled", False)
+
+        if self._get("window_extraction", False).lower() == "true":
+            self._set("window_extraction", "enabled", False)
+        else:
+            self._set("window_extraction", "disabled", False)
+
+        if self._get("window_extraction_trusted", False).lower() == "true":
+            self._set("window_extraction_trusted", "enabled", False)
+        else:
+            self._set("window_extraction_trusted", "disabled", False)
+
+        if self._get("websocket", False).lower() == "true":
+            self._set("websocket", "enabled", False)
+        else:
+            self._set("websocket", "disabled", False)
+
+        if self._get("websocket_trusted", False).lower() == "true":
+            self._set("websocket_trusted", "enabled", False)
+        else:
+            self._set("websocket_trusted", "disabled", False)
+
+        if self._get("request_log", False).lower() == "true":
+            self._set("request_log", "enabled", False)
+        else:
+            self._set("request_log", "disabled", False)
+
+        self._set("name", self.name, False)
+        fd = open(os.path.join(self.config_dir, "seedlink.ini"), "w")
+        fd.write(self._process_template("seedlink_head.tpl", None, False))
+
+        if self.sproc_used:
+            fd.write(self._process_template("seedlink_sproc.tpl", None, False))
+
+        for i in self.seedlink_source.values():
+            for (source_type, source_id, self.global_params, self.station_params) in i.values():
+                source = self._process_template("seedlink_plugin.tpl", source_type)
+                if source:
+                    fd.write(source)
+
+        fd.write(self._process_template("seedlink_station_head.tpl", None, False))
+
+        for k in sorted(self.seedlink_station.keys()):
+            fd.write(self.seedlink_station[k])
+
+        fd.close()
+
+        if self.plugins_ini:
+            fd = open(os.path.join(self.config_dir, "plugins.ini"), "w")
+            for i in self.plugins_ini.values():
+                fd.write(i)
+
+            fd.close()
+        else:
+            # If plugins.ini is not used, remove it from previous runs
+            try: os.remove(os.path.join(self.config_dir, "plugins.ini"))
+            except: pass
+
+        if self.sproc_used:
+            fd = open(self._get('seedlink.streams', False), "w")
+            fd.write('<streams>\n')
+
+            for i in self.sproc.values():
+                fd.write(i)
+
+            fd.write('</streams>\n')
+            fd.close()
+
+            fd = open(self._get('seedlink.filters', False), "w")
+            fd.write(self._process_template("filters.fir.tpl", None, False))
+            fd.close()
+
+        # If no stream procs are used, remove the generated files of a
+        # previous run
+        else:
+            try: os.remove(self._get('seedlink.streams', False))
+            except: pass
+            try: os.remove(self._get('seedlink.filters', False))
+            except: pass
+
+        for (f, s, perm) in self.templates:
+            fd = open(os.path.join(self.config_dir, f), "w")
+            fd.write(self._process_template(f + '.tpl', s, False))
+            fd.close()
+            if perm:
+                os.chmod(os.path.join(self.config_dir, f), perm)
+
+        return 0
+
+
+    def printCrontab(self):
+        print("55 23 * * * %s >/dev/null 2>&1" % (os.path.join(self.config_dir, "backup_seqfiles"),))
+
diff --git a/etc/init/slarchive.auto b/etc/init/slarchive.auto
new file mode 100644
index 0000000..e69de29
diff --git a/etc/init/slarchive.py b/etc/init/slarchive.py
new file mode 100644
index 0000000..b2aa3f5
--- /dev/null
+++ b/etc/init/slarchive.py
@@ -0,0 +1,212 @@
+from __future__ import print_function
+import os, string, time, re, glob
+import 
seiscomp.kernel, seiscomp.config + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + self.archive_dir = os.path.join(self.env.SEISCOMP_ROOT, "var", "lib", "archive") + self.config_dir = os.path.join(self.env.SEISCOMP_ROOT, "var", "lib", self.name) + self.certs_dir = os.path.join(self.env.SEISCOMP_ROOT, "var", "lib", "certs") + self.host = "127.0.0.1" + self.port = 18000 + self.buffer = 1000 + + + def _readConfig(self): + self.rc = {} + + cfg = seiscomp.config.Config() + cfg.readConfig(os.path.join(self.env.SEISCOMP_ROOT, "etc", "defaults", self.name + ".cfg")) + cfg.readConfig(os.path.join(self.env.SEISCOMP_ROOT, "etc", self.name + ".cfg")) + try: cfg.readConfig(os.path.join(os.environ['HOME'], ".seiscomp", self.name + ".cfg")) + except: pass + self.params = dict([(x, ",".join(cfg.getStrings(x))) for x in cfg.names()]) + + try: self.host = self.params['address'] + except: self.params['address'] = self.host + + try: self.port = int(self.params['port']) + except: self.params['port'] = self.port + + try: self.buffer = self.params['buffer'] + except: self.params['buffer'] = self.buffer + + try: + self.archive_dir = self.params['archive'] + if not os.path.isabs(self.archive_dir): + self.archive_dir = os.path.join(self.env.SEISCOMP_ROOT, self.archive_dir) + except: pass + self.params['archive'] = self.archive_dir + + try: + self.certs_dir = self.params['validation.certs'] + if not os.path.isabs(self.certs_dir): + self.certs_dir = os.path.join(self.env.SEISCOMP_ROOT, self.certs_dir) + except: pass + self.params['validation.certs'] = self.certs_dir + + self.params['slarchive._config_dir'] = self.config_dir + return cfg + + + def _run(self): + cfg = self._readConfig() + + mymodname = self.name + "_" + self.host + "_" + str(self.port) + + config_file = os.path.join(self.config_dir, self.name + ".streams") + run_dir = os.path.join(self.env.SEISCOMP_ROOT, "var", "run", self.name) + + try: os.makedirs(run_dir) + except: pass + + try: os.makedirs(self.archive_dir) + except: pass + + prog = "run_with_lock" + params = self.env.lockFile(self.name) + params += " " + self.name + ' -b -x "' + os.path.join(run_dir, mymodname + ".seq") + ':1000000"' + params += ' -SDS "%s"' % self.archive_dir + try: params += ' -B %d' % cfg.getInt('buffer') + except: pass + try: params += ' -nt %d' % cfg.getInt('networkTimeout') + except: params += ' -nt 900' + try: params += ' -nd %d' % cfg.getInt('delay') + except: pass + try: params += ' -i %d' % cfg.getInt('idleTimeout') + except: pass + try: params += ' -k %d' % cfg.getInt('keepalive') + except: pass + params += ' -Fi:1 -Fc:900 -l "%s" %s:%d' % (config_file,self.host,self.port) + try: + params += " -Cs %s" %cfg.getString('validation.mode') + try: params += ' -certs %s' % self.certs_dir + except: pass + except: pass + return self.env.start(self.name, prog, params, True) + + + def _processStation(self, key_dir, profile): + if profile: + station_config_file = "profile_%s" % (profile,) + else: + station_config_file = "station_%s_%s" % (self.net, self.sta) + + cfg = seiscomp.config.Config() + cfg.readConfig(os.path.join(key_dir, station_config_file)) + line = self.net + " " + self.sta + try: line += " " + cfg.getString("selectors") + except: pass + + keepdays = 30 + try: keepdays = cfg.getInt("keep") + except: pass + + rc = "STATION='%s'\n" % self.sta + \ + "NET='%s'\n" % self.net + \ + "ARCH_KEEP='%d'\n" % keepdays + + self.rc[self.net + "_" + self.sta] = rc + + return line + + def 
requiresKernelModules(self):
+        return False
+
+    def updateConfig(self):
+        self._readConfig()
+        template_dir = os.path.join(self.env.SEISCOMP_ROOT, "share", "templates", "slarchive")
+
+        # Create purge_datafiles script
+        tpl_paths = [template_dir]
+        purge_script = self.env.processTemplate('purge_datafiles.tpl', tpl_paths, self.params, True)
+        if purge_script:
+            try: os.makedirs(self.config_dir)
+            except: pass
+            fd = open(os.path.join(self.config_dir, "purge_datafiles"), "w")
+            fd.write(purge_script)
+            fd.close()
+            os.chmod(os.path.join(self.config_dir, "purge_datafiles"), 0o755)
+        else:
+            try: os.remove(os.path.join(self.config_dir, "purge_datafiles"))
+            except: pass
+
+        rx_binding = re.compile(r'(?P<module>[A-Za-z0-9_\.-]+)(:(?P<profile>[A-Za-z0-9_-]+))?$')
+
+        bindings_dir = os.path.join(self.env.SEISCOMP_ROOT, "etc", "key")
+        key_dir = os.path.join(bindings_dir, self.name)
+        config_file = os.path.join(self.config_dir, self.name + ".streams")
+
+        # Remove config file
+        try: os.remove(config_file)
+        except: pass
+
+        config_fd = None
+        files = glob.glob(os.path.join(bindings_dir, "station_*"))
+        for f in files:
+            try:
+                (path, net, sta) = f.split('_')[-3:]
+                if not path.endswith("station"):
+                    print("invalid path", f)
+
+            except ValueError:
+                print("invalid path", f)
+                continue
+
+            self.net = net
+            self.sta = sta
+
+            fd = open(f)
+            line = fd.readline()
+            while line:
+                line = line.strip()
+                if not line or line[0] == '#':
+                    line = fd.readline()
+                    continue
+
+                m = rx_binding.match(line)
+                if not m:
+                    print("invalid binding in %s: %s" % (f, line))
+                    line = fd.readline()
+                    continue
+
+                if m.group('module') != self.name:
+                    line = fd.readline()
+                    continue
+
+                profile = m.group('profile')
+                line = self._processStation(key_dir, profile)
+                if line:
+                    if not config_fd:
+                        try: os.makedirs(self.config_dir)
+                        except: pass
+                        try: config_fd = open(config_file, "w")
+                        except:
+                            raise Exception("Error: unable to create slarchive config file '%s'" % config_file)
+                    config_fd.write("%s\n" % line)
+                break
+
+            fd.close()
+
+        # Create rc file
+        rc_files = glob.glob(os.path.join(self.config_dir, "rc_*"))
+        for (station_id, rc) in self.rc.items():
+            fd = open(os.path.join(self.config_dir, "rc_%s" % (station_id,)), "w")
+            fd.write(rc)
+            fd.close()
+
+        # Clean up unused rc_* files
+        for rc in rc_files:
+            if os.path.basename(rc)[3:] not in self.rc:
+                try: os.remove(rc)
+                except: pass
+
+        return 0
+
+    def supportsAliases(self):
+        # The default handler does not support aliases
+        return True
+
+    def printCrontab(self):
+        print("20 3 * * * %s/purge_datafiles >/dev/null 2>&1" % (self.config_dir))
diff --git a/etc/init/slmon.py b/etc/init/slmon.py
new file mode 100644
index 0000000..a8ef808
--- /dev/null
+++ b/etc/init/slmon.py
@@ -0,0 +1,181 @@
+from __future__ import print_function
+import os, string, time, re, glob
+import seiscomp.kernel, seiscomp.config
+
+class Module(seiscomp.kernel.Module):
+    def __init__(self, env):
+        seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__))
+        self.config_dir = os.path.join(self.env.SEISCOMP_ROOT, "var", "lib", self.name)
+        self.rc_dir = os.path.join(self.env.SEISCOMP_ROOT, "var", "lib", "rc")
+
+
+    def _readConfig(self):
+        self.rc = {}
+
+        cfg = seiscomp.config.Config()
+        cfg.readConfig(os.path.join(self.env.SEISCOMP_ROOT, "etc", "defaults", self.name + ".cfg"))
+        try: cfg.readConfig(os.path.join(self.env.SEISCOMP_ROOT, "etc", self.name + ".cfg"))
+        except: pass
+        try: cfg.readConfig(os.path.join(os.environ['HOME'], ".seiscomp", self.name + ".cfg"))
+        except: pass
+
+        self.params = 
dict([(x, ",".join(cfg.getStrings(x))) for x in cfg.names()]) + + try: self.params['title'] + except: self.params['title'] = "SeedLink Monitor" + + try: self.params['refresh'] + except: self.params['refresh'] = "180" + + try: self.params['address'] + except: self.params['address'] = "127.0.0.1" + + try: int(self.params['port']) + except: self.params['port'] = 18000 + + try: self.params['email'] + except: self.params['email'] = "" + + try: self.params['wwwdir'] = self.params['wwwdir'].replace("@ROOTDIR@", self.env.SEISCOMP_ROOT).replace("@NAME@", self.name) + except: self.params['wwwdir'] = os.path.join(self.env.SEISCOMP_ROOT, "var", "run", "slmon") + + # yet to be implemente correctly: + # live seismograms, lin in footer: + try: self.params['liveurl'] + except: self.params['liveurl'] = "http://geofon.gfz-potsdam.de/waveform/liveseis.php?station=%s" + + # favicon: + try: self.params['icon'] + except: self.params['icon'] = "http://www.gfz-potsdam.de/favicon.ico" + + # link name to external site in footer + try: self.params['linkname'] + except: self.params['linkname'] = "GEOFON" + + # link to external site in footer + try: self.params['linkurl'] + except: self.params['linkurl'] = "http://www.gfz-potsdam.de/geofon/" + + return cfg + + + def _run(self): + station_file = os.path.join(self.env.SEISCOMP_ROOT, "var", "lib", self.name, "stations.ini") + config_file = os.path.join(self.env.SEISCOMP_ROOT, "var", "lib", self.name, "config.ini") + + prog = "run_with_lock" + params = self.env.lockFile(self.name) + params += " " + self.name + ' -s "' + station_file + '" -c "' + config_file + '"' + return self.env.start(self.name, prog, params, True) + + + def _processStation(self, key_dir, profile): + if profile: + station_config_file = "profile_%s" % (profile,) + else: + station_config_file = "station_%s_%s" % (self.net, self.sta) + + cfg = seiscomp.config.Config() + cfg.readConfig(os.path.join(key_dir, station_config_file)) + try: group = cfg.getString("group") + except: group = "local" + + description = "" + + try: + rc = seiscomp.config.Config() + rc.readConfig(os.path.join(self.rc_dir, "station_%s_%s" % (self.net, self.sta))) + description = rc.getString("description") + except Exception as e: + # Maybe the rc file doesn't exist, maybe there's no readable description. 
+            pass
+
+        if len(description) == 0:
+            description = self.sta
+
+        content = "[" + self.net + "_" + self.sta + "]\n"
+        content += "net = %s\n" % self.net
+        content += "sta = %s\n" % self.sta
+        content += "info = %s\n" % description
+        content += "group = %s\n" % group
+        content += "type = real\n"
+
+        return content
+
+
+    def updateConfig(self):
+        self._readConfig()
+        template_dir = os.path.join(self.env.SEISCOMP_ROOT, "share", "templates", self.name)
+
+        # Create purge_datafiles script
+        tpl_paths = [template_dir]
+        config_file = self.env.processTemplate('config.tpl', tpl_paths, self.params, True)
+        if config_file:
+            try: os.makedirs(self.config_dir)
+            except: pass
+            fd = open(os.path.join(self.config_dir, "config.ini"), "w")
+            fd.write(config_file)
+            fd.close()
+            os.chmod(os.path.join(self.config_dir, "config.ini"), 0o755)
+        else:
+            try: os.remove(os.path.join(self.config_dir, "config.ini"))
+            except: pass
+
+        rx_binding = re.compile(r'(?P<module>[A-Za-z0-9_\.-]+)(:(?P<profile>[A-Za-z0-9_-]+))?$')
+
+        bindings_dir = os.path.join(self.env.SEISCOMP_ROOT, "etc", "key")
+        key_dir = os.path.join(bindings_dir, self.name)
+        config_file = os.path.join(self.config_dir, "stations.ini")
+
+        # Remove config file
+        try: os.remove(config_file)
+        except: pass
+
+        config_fd = None
+        files = glob.glob(os.path.join(bindings_dir, "station_*"))
+        for f in files:
+            try:
+                (path, net, sta) = f.split('_')[-3:]
+                if not path.endswith("station"):
+                    print("invalid path", f)
+
+            except ValueError:
+                print("invalid path", f)
+                continue
+
+            self.net = net
+            self.sta = sta
+
+            fd = open(f)
+            line = fd.readline()
+            while line:
+                line = line.strip()
+                if not line or line[0] == '#':
+                    line = fd.readline()
+                    continue
+
+                m = rx_binding.match(line)
+                if not m:
+                    print("invalid binding in %s: %s" % (f, line))
+                    line = fd.readline()
+                    continue
+
+                if m.group('module') != self.name:
+                    line = fd.readline()
+                    continue
+
+                profile = m.group('profile')
+                content = self._processStation(key_dir, profile)
+                if content:
+                    if not config_fd:
+                        try: os.makedirs(self.config_dir)
+                        except: pass
+                        try: config_fd = open(config_file, "w")
+                        except:
+                            raise Exception("Error: unable to create slarchive config file '%s'" % config_file)
+                    config_fd.write("%s\n" % content)
+                break
+
+            fd.close()
+
+        return 0
diff --git a/etc/init/trunk.py b/etc/init/trunk.py
new file mode 100644
index 0000000..6c60864
--- /dev/null
+++ b/etc/init/trunk.py
@@ -0,0 +1,109 @@
+import os
+import glob
+import time
+import sys
+import seiscomp.config
+import seiscomp.kernel
+import seiscomp.system
+import seiscomp.bindings2cfg
+
+
+def parseBindPort(bind):
+    bindToks = bind.split(':')
+    if len(bindToks) == 1:
+        return int(bindToks[0])
+    elif len(bindToks) == 2:
+        return int(bindToks[1])
+    else:
+        return -1
+
+
+class Module(seiscomp.kernel.Module):
+    def __init__(self, env):
+        seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__))
+        # This is a config module which synchronizes bindings with the database
+        self.isConfigModule = True
+
+    def updateConfig(self):
+        messaging = True
+        messagingPort = 18180
+        messagingProtocol = 'scmp'
+
+        try:
+            messaging = self.env.getBool("messaging.enable")
+        except:
+            pass
+
+        # If messaging is disabled in kernel.cfg, do not do anything
+        if not messaging:
+            sys.stdout.write("- messaging disabled, nothing to do\n")
+            return 0
+
+        # Load scmaster configuration and figure the bind ports of scmaster out
+        cfg = seiscomp.config.Config()
+        seiscomp.system.Environment.Instance().initConfig(cfg, "scmaster")
+
+        # First check the unencrypted port 
and prefer that + p = parseBindPort(cfg.getString("interface.bind")) + if p > 0: + messagingPort = p + + try: + bind = self.env.getString("messaging.bind") + bindToks = bind.split(':') + if len(bindToks) == 1: + messagingPort = int(bindToks[0]) + elif len(bindToks) == 2: + messagingPort = int(bindToks[1]) + else: + sys.stdout.write( + "E invalid messaging bind parameter: %s\n" % bind) + sys.stdout.write(" expected either 'port' or 'ip:port'\n") + return 1 + except: + pass + + # Otherwise check if ssl is enabled + else: + p = parseBindPort(cfg.getString("interface.ssl.bind")) + if p > 0: + messagingPort = p + messagingProtocol = 'scmps' + + # Synchronize database configuration + params = [self.name, '--console', '1', '-H', + '%s://localhost:%d/production' % (messagingProtocol, messagingPort)] + # Create the database update app and run it + # This app implements a seiscomp.client.Application and connects + # to localhost regardless of connections specified in global.cfg to + # prevent updating a remote installation by accident. + app = seiscomp.bindings2cfg.ConfigDBUpdater(len(params), params) + app.setConnectionRetries(3) + return app() + + def setup(self, setup_config): + cfgfile = os.path.join(self.env.SEISCOMP_ROOT, "etc", "global.cfg") + + cfg = seiscomp.config.Config() + cfg.readConfig(cfgfile) + try: + cfg.setString("datacenterID", setup_config.getString( + "global.meta.datacenterID")) + except: + cfg.remove("datacenterID") + + try: + cfg.setString("agencyID", setup_config.getString( + "global.meta.agencyID")) + except: + cfg.remove("agencyID") + + try: + cfg.setString("organization", setup_config.getString( + "global.meta.organization")) + except: + cfg.remove("organization") + + cfg.writeConfig() + + return 0 diff --git a/etc/inventory/README b/etc/inventory/README new file mode 100644 index 0000000..05e152d --- /dev/null +++ b/etc/inventory/README @@ -0,0 +1 @@ +Put your inventory XML files into this folder. 
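Editor's note: the inventory file added below is an example of the XML files the README above refers to. A quick way to sanity-check such a file is a small helper like the following. This is a sketch only, not part of the commit; it assumes seiscomp.io.XMLArchive provides open() and readObject() as the read-side counterparts of the create()/writeObject() calls used in bin/arclink2inv, and it reuses the inventory accessors seen in _loadStationDescriptions() in etc/init/seedlink.py.

#!/usr/bin/env seiscomp-python
# Sketch: list network/station codes and descriptions from an inventory XML
# such as etc/inventory/inventory_CX.xml (assumed file name from this commit).
import sys
import seiscomp.datamodel
import seiscomp.io


def dump_stations(path):
    # Read the XML document into a generic object and cast it to Inventory
    ar = seiscomp.io.XMLArchive()
    if not ar.open(path):
        sys.stderr.write("Failed to open %s\n" % path)
        return 1

    obj = ar.readObject()
    ar.close()

    inv = seiscomp.datamodel.Inventory.Cast(obj)
    if inv is None:
        sys.stderr.write("No inventory found in %s\n" % path)
        return 1

    # Same accessors as _loadStationDescriptions() in etc/init/seedlink.py
    for ni in range(inv.networkCount()):
        net = inv.network(ni)
        for si in range(net.stationCount()):
            sta = net.station(si)
            print("%s %s %s" % (net.code(), sta.code(), sta.description()))
    return 0


if __name__ == "__main__":
    sys.exit(dump_stations(sys.argv[1]))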
diff --git a/etc/inventory/inventory_CX.xml b/etc/inventory/inventory_CX.xml
new file mode 100644
index 0000000..a0803a8
--- /dev/null
+++ b/etc/inventory/inventory_CX.xml
@@ -0,0 +1,2 @@
[The two added lines are the SeisComP inventory XML for the CX network (GFZ):
sensor and datalogger definitions (Streckeisen STS-2, Trillium Compact,
Quanterra Q330, EarthData PS6-SC), ResponsePAZ/ResponseFIR coefficient blocks
and the CX station and stream epochs. The XML markup was lost during text
extraction, leaving only unreadable concatenated element content, so the
payload is not reproduced here.]
-0.000574947 -0.000128043 0.000395526 0.000851361 0.00110159 0.00105558 0.000700551 0.000112345 -0.000558753 -0.00112519 -0.00141357 -0.00131641 -0.000829312 -6.10946e-05 0.000788264 0.00147829 0.00179487 0.00161507 0.000950752 -4.37621e-05 -0.00110503 -0.00192868 -0.0022552 -0.00194959 -0.00105099 0.000224779 0.00153433 0.00249725 0.00280468 0.00231561 0.00111142 -0.000510507 -0.00210718 -0.00320865 -0.00345387 -0.00270515 -0.00110589 0.000939782 0.00286553 0.00409665 0.0042182 0.00310923 0.00100105 -0.00156353 -0.00386551 -0.00520724 -0.0051191 -0.0035152 -0.00074874 0.00245722 0.00519287 0.00661396 0.00619507 0.00390959 0.000278111 -0.00373934 -0.00698863 -0.00844379 -0.00751883 -0.0042774 0.000529093 0.00562055 0.00951518 0.0109444 0.00924347 0.00460376 -0.00189947 -0.00853129 -0.0133335 -0.0146694 -0.0117328 -0.00487461 0.00436211 0.0135399 0.0199084 0.0211419 0.0160512 0.00507765 -0.00958023 -0.0242775 -0.0346331 -0.0364985 -0.02697 -0.00520347 0.0271753 0.0661873 0.106146 0.140714 0.164141 0.1724271050080C4.032461e-05 7.45328e-05 0.0001234553 0.0001701887 0.0001973105 0.0001854891 0.0001193456 -5.723101e-06 -0.0001779232 -0.0003673259 -0.0005295104 -0.0006150085 -0.0005832354 -0.0004172837 -0.0001349516 0.000208333 0.000527709 0.0007281899 0.0007312587 0.0005019202 6.783176e-05 -0.0004771493 -0.000989158 -0.001308918 -0.001307358 -0.0009300168 -0.0002262541 0.0006483476 0.001461708 0.001963222 0.001956625 0.001367725 0.0002854628 -0.001040387 -0.002250679 -0.002969069 -0.002912737 -0.001990583 -0.0003573537 0.00159884 0.003340972 0.004323764 0.004155636 0.002736002 0.000323431 -0.002494752 -0.004934943 -0.006225197 -0.005836136 -0.003668966 -0.0001394092 0.003880228 0.007261232 0.008919356 0.008140252 0.00483705 -0.0003434785 -0.006115665 -0.01084778 -0.01299272 -0.01154995 -0.006430376 0.001391199 0.01000571 0.01698057 0.0199734 0.01740665 0.009029463 -0.003794969 -0.01818304 -0.03022295 -0.03578333 -0.03146898 -0.01550444 0.01167237 0.04726833 0.08650819 0.1234668 0.1521942 0.16789391980-01-01T00:00:00.0000ZIPOC Seismic Network (Integrated Plate boundary Observatory Chile)falsetrue{"type":"DOI","value":"10.14470/PK615318"}FDSNXML:Identifier/02006-02-21T00:00:00.0000ZIPOC Station Huatacondo, Chile-21.04323-69.4874900Chilefalsetrue2006-02-21T00:00:00.0000Z-21.04323-69.48749002006-02-21T00:00:00.0000Z162119052312012006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90523","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1621","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-02-21T00:00:00.0000Z16210905230100120-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90523","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1621","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-02-21T00:00:00.0000Z1621290523220129006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90523","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1621","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-02-21T00:00:00.0000Z1621090523020120-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90523","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1621","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-02-21T00:00:00.0000Z16212905232100129006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90523","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1621","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-02-21T00:00:00.0000Z1621190523110012006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90523","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1621","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-06T00:00:00.0000ZIPOC Station Salar Grande, Chile-21.31973-69.896031015Chilefalsetrue2006-05-06T00:00:00.0000Z-21.31973-69.8960310152006-05-06T00:00:00.0000Z134819052012010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90520","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1348","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-06T00:00:00.0000Z13480905200100100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90520","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1348","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-06T00:00:00.0000Z1348290520220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90520","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1348","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-06T00:00:00.0000Z1348090520020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90520","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1348","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-06T00:00:00.0000Z13482905202100109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90520","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1348","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-06T00:00:00.0000Z1348190520110010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90520","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1348","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-19T00:00:00.0000ZIPOC Station El Tigre, Chile-22.04847-69.75311460Chilefalsetrue2006-05-19T00:00:00.0000Z-22.04847-69.753114602006-05-19T00:00:00.0000Z162019053412010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90534","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1620","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-19T00:00:00.0000Z16200905340100100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90534","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1620","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-19T00:00:00.0000Z1620290534220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90534","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1620","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-19T00:00:00.0000Z1620090534020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90534","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1620","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-19T00:00:00.0000Z16202905342100109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90534","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1620","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-19T00:00:00.0000Z1620190534110010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90534","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1620","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-17T00:00:00.0000ZIPOC Station Mantos de la Luna, Chile-22.33369-70.149181520Chilefalsetrue2006-05-17T00:00:00.0000Z-22.33369-70.1491815202006-05-17T00:00:00.0000Z161819054712012006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90547","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1618","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-17T00:00:00.0000Z16180905470100120-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90547","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1618","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-17T00:00:00.0000Z1618290547220129006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90547","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1618","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-17T00:00:00.0000Z1618090547020120-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90547","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1618","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-17T00:00:00.0000Z16182905472100129006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90547","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1618","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-17T00:00:00.0000Z1618190547110012006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90547","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1618","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-19T00:00:00.0000ZIPOC Station Michilla, 
Chile-22.85283-70.202351150Chilefalsetrue2006-05-19T00:00:00.0000Z-22.85283-70.2023511502006-05-19T00:00:00.0000Z161919053112010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90531","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1619","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-19T00:00:00.0000Z16190905310100100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90531","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1619","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-19T00:00:00.0000Z1619290531220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90531","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1619","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-19T00:00:00.0000Z1619090531020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90531","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1619","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-19T00:00:00.0000Z16192905312100109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90531","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1619","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-05-19T00:00:00.0000Z1619190531110010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90531","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1619","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-03-04T00:00:00.0000ZIPOC Station Pedro de Valdivia, Chile-22.7058-69.571881440Chilefalsetrue2007-03-04T00:00:00.0000Z-22.7058-69.5718814402007-03-04T00:00:00.0000Z184318060912010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80609","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1843","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-03-04T00:00:00.0000Z18430806090100100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80609","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1843","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-03-04T00:00:00.0000Z1843280609220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80609","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1843","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-03-04T00:00:00.0000Z1843080609020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80609","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1843","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-03-04T00:00:00.0000Z18432806092100109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80609","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1843","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-03-04T00:00:00.0000Z1843180609110010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80609","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1843","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-02-27T00:00:00.0000ZIPOC Station Cerro Tatas, Chile-21.72667-69.886181570Chilefalsetrue2007-02-27T00:00:00.0000Z-21.72667-69.8861815702007-02-27T00:00:00.0000Z184418060712010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80607","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1844","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-02-27T00:00:00.0000Z18440806070100100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80607","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1844","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-02-27T00:00:00.0000Z1844280607220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80607","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1844","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-02-27T00:00:00.0000Z1844080607020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80607","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1844","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-02-27T00:00:00.0000Z18442806072100109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80607","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1844","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-02-27T00:00:00.0000Z1844180607110010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80607","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1844","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-03-01T00:00:00.0000ZIPOC Station Macaya, Chile-20.14112-69.15343060Chilefalsetrue2007-03-01T00:00:00.0000Z-20.14112-69.153430602007-03-01T00:00:00.0000Z184518060512010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80605","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1845","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-03-01T00:00:00.0000Z18450806050100100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80605","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1845","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-03-01T00:00:00.0000Z1845280605220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80605","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1845","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-03-01T00:00:00.0000Z1845080605020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80605","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1845","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-03-01T00:00:00.0000Z18452806052100109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80605","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1845","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2007-03-01T00:00:00.0000Z1845180605110010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"80605","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"1845","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2008-02-06T00:00:00.0000ZIPOC Station Quillagua, Chile-21.79638-69.241921530Chilefalsetrue2008-02-06T00:00:00.0000Z-21.79638-69.2419215302008-02-06T00:00:00.0000Z2627190720120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90720","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2627","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-02-06T00:00:00.0000Z26270907200100100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90720","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2627","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-02-06T00:00:00.0000Z26272907202201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90720","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2627","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-02-06T00:00:00.0000Z2627090720020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90720","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2627","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-02-06T00:00:00.0000Z262729072021001090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90720","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2627","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-02-06T00:00:00.0000Z26271907201100100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90720","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2627","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-02-03T00:00:00.0000ZIPOC Station Juan Lopez, Chile-23.51343-70.55408250Chilefalsetrue2008-02-03T00:00:00.0000Z-23.51343-70.554082502008-02-03T00:00:00.0000Z2628190722120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90722","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2628","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-02-03T00:00:00.0000Z26280907220100100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90722","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2628","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-02-03T00:00:00.0000Z26282907222201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90722","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2628","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-02-03T00:00:00.0000Z2628090722020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90722","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2628","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-02-03T00:00:00.0000Z262829072221001090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90722","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2628","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-02-03T00:00:00.0000Z26281907221100100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90722","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2628","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000ZIPOC Station Quebrada Aricilda, Chile-19.76096-69.655821400Chilefalsetrue2017-04-18T00:00:00.0000Z-19.76096-69.6558214002017-04-18T00:00:00.0000Z2629190713140100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90713","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2629","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2017-04-18T00:00:00.0000Z26290907130100100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90713","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2629","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2017-04-18T00:00:00.0000Z26292907132401090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90713","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2629","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2017-04-18T00:00:00.0000Z2629090713040100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90713","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2629","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2017-04-18T00:00:00.0000Z262929071321001090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90713","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2629","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2017-04-18T00:00:00.0000Z26291907131100100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"90713","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2629","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2011-03-30T00:00:00.0000ZIPOC 
Station Cerro Caramaca near Arica, Chile-18.61406-70.32809908Chilefalsetrue2011-03-30T00:00:00.0000Z-18.61406-70.328099082011-03-30T00:00:00.0000Zxxxx120531120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"20531","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2011-03-30T00:00:00.0000Zxxxx0205310100100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"20531","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2011-03-30T00:00:00.0000Zxxxx2205312201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"20531","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2011-03-30T00:00:00.0000Zxxxx020531020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"20531","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2011-03-30T00:00:00.0000Zxxxx22053121001090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"20531","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2011-03-30T00:00:00.0000Zxxxx1205311100100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"20531","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000ZIPOC Station Cerro Chaquipina, Chile-18.33585-69.50164480Chilefalsetrue2008-12-23T00:00:00.0000Z-18.33585-69.501644802008-12-23T00:00:00.0000Zxxxx170802120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000Zxxxx0708020100100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000Zxxxx2708022201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000Zxxxx070802020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000Zxxxx27080221001090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000Zxxxx1708021100100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2010-03-27T00:00:00.0000ZIPOC Station European Southern Observatory Cerro Paranal, Chile-24.62597-70.403792630Chilefalsetrue2010-03-27T00:00:00.0000Z-24.62597-70.4037926302010-03-27T00:00:00.0000Zxxxx1yyyy12010005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:CX1980:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20181107193039.45543.320"}FDSNXML:DataLogger2010-03-27T00:00:00.0000Zxxxx0yyyy0100100-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"EarthData 
PS6-SC","description":"GFZ:CX1980:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20181107193039.45543.320"}FDSNXML:DataLogger2010-03-27T00:00:00.0000Zxxxx2yyyy220109005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:CX1980:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20181107193039.45543.320"}FDSNXML:DataLogger2010-03-27T00:00:00.0000Zxxxx0yyyy020100-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:CX1980:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20181107193039.45543.320"}FDSNXML:DataLogger2010-03-27T00:00:00.0000Zxxxx2yyyy2100109005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:CX1980:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20181107193039.45543.320"}FDSNXML:DataLogger2010-03-27T00:00:00.0000Zxxxx1yyyy110010005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:CX1980:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20181107193039.45543.320"}FDSNXML:DataLogger2010-03-27T00:00:00.0000ZIPOC Station Sierra Gorda, Chile-23.208335-69.470921830Chilefalsetrue2010-03-27T00:00:00.0000Z-23.208335-69.4709218302010-03-27T00:00:00.0000Z26281yyyy120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2628","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2010-03-27T00:00:00.0000Z26280yyyy0100100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2628","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2010-03-27T00:00:00.0000Z26282yyyy2201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2628","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2010-03-27T00:00:00.0000Z26280yyyy020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2628","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2010-03-27T00:00:00.0000Z26282yyyy21001090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2628","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2010-03-27T00:00:00.0000Z26281yyyy1100100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"2628","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000ZIPOC Station Cerro Chaquipina new, Chile-18.3351-69.507674480Chilefalsetrue2008-12-23T00:00:00.0000Z-18.3351-69.5076744802008-12-23T00:00:00.0000Zxxxx170802120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000Zxxxx0708020100100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000Zxxxx2708022201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000Zxxxx070802020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000Zxxxx27080221001090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2008-12-23T00:00:00.0000Zxxxx1708021100100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2015-12-04T00:00:00.0000ZIPOC Station Visviri, Chile-17.58954-69.484155Chilefalsetrue2015-12-04T00:00:00.0000Z-17.58954-69.4841552015-12-04T00:00:00.0000Zxxxx170802120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2015-12-04T00:00:00.0000Zxxxx0708020100100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2015-12-04T00:00:00.0000Zxxxx2708022201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2015-12-04T00:00:00.0000Zxxxx070802020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2015-12-04T00:00:00.0000Zxxxx27080221001090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2015-12-04T00:00:00.0000Zxxxx1708021100100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"70802","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2017-03-11T00:00:00.0000ZIPOC Station Pico de Oro, 
Chile-23.90478-69.2906472480Chilefalsetrue2017-03-11T00:00:00.0000Z-23.90478-69.29064724802017-03-11T00:00:00.0000Z46261110654120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"110654","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"4626","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2017-03-11T00:00:00.0000Z462601106540100100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"110654","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"4626","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2017-03-11T00:00:00.0000Z462621106542201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"110654","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"4626","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2017-03-11T00:00:00.0000Z46260110654020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"110654","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"4626","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2017-03-11T00:00:00.0000Z4626211065421001090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"110654","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"4626","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2017-03-11T00:00:00.0000Z462611106541100100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"110654","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"4626","resourceId":"Datalogger/20181107193039.422341.201"}FDSNXML:DataLogger2006-11-21T00:00:00.0000ZIPOC Station Humberstone, Chile-20.27822-69.887911152Chilefalsetrue2006-11-21T00:00:00.0000Z-20.27822-69.8879111522006-11-21T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-21T00:00:00.0000Zxxxx0yyyy0100100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-21T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-21T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-21T00:00:00.0000Zxxxx2yyyy2100109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-21T00:00:00.0000Zxxxx1yyyy110010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-27T00:00:00.0000ZIPOC Station Minimi, Chile-19.13108-69.595532304Chilefalsetrue2006-11-27T00:00:00.0000Z-19.13108-69.5955323042006-11-27T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-27T00:00:00.0000Zxxxx0yyyy0100100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-27T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-27T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-27T00:00:00.0000Zxxxx2yyyy2100109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-27T00:00:00.0000Zxxxx1yyyy110010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-12-01T00:00:00.0000ZIPOC Station Patache, Chile-20.82071-70.15288832Chilefalsetrue2006-12-01T00:00:00.0000Z-20.82071-70.152888322006-12-01T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-12-01T00:00:00.0000Zxxxx0yyyy0100100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-12-01T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-12-01T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-12-01T00:00:00.0000Zxxxx2yyyy2100109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-12-01T00:00:00.0000Zxxxx1yyyy110010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-28T00:00:00.0000ZIPOC Station Pisagua, Chile-19.59717-70.12305966Chilefalsetrue2006-11-28T00:00:00.0000Z-19.59717-70.123059662006-11-28T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-28T00:00:00.0000Zxxxx0yyyy0100100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-28T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-28T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-28T00:00:00.0000Zxxxx2yyyy2100109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2006-11-28T00:00:00.0000Zxxxx1yyyy110010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:CX1980:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20181107193039.348267.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:CX1980:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20181107193039.350384.8"}FDSNXML:DataLogger2018-11-05T00:00:00.0000ZIPOC Station Salar de Carmen, Chile-23.51152-70.24953700Chilefalsetrue2018-11-05T00:00:00.0000Z-23.51152-70.249537002018-11-05T00:00:00.0000Zxxxx1yyyy12011002956856001M/Sfalse{"type":"VBB","description":"GFZ:CX1980:Trillium-Compact-120/GIPP/g=754","model":"Trillium Compact","resourceId":"Sensor/20181107193039.533437.549"}FDSNXML:Sensor{"type":"EarthData 
PS6-SC","description":"GFZ:CX1980:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20181107193039.45543.320"}FDSNXML:DataLogger2018-11-05T00:00:00.0000Zxxxx0yyyy0100110-902956856001M/Sfalse{"type":"VBB","description":"GFZ:CX1980:Trillium-Compact-120/GIPP/g=754","model":"Trillium Compact","resourceId":"Sensor/20181107193039.533437.549"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:CX1980:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20181107193039.45543.320"}FDSNXML:DataLogger2018-11-05T00:00:00.0000Zxxxx2yyyy220119002956856001M/Sfalse{"type":"VBB","description":"GFZ:CX1980:Trillium-Compact-120/GIPP/g=754","model":"Trillium Compact","resourceId":"Sensor/20181107193039.533437.549"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:CX1980:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20181107193039.45543.320"}FDSNXML:DataLogger2018-11-05T00:00:00.0000Zxxxx0yyyy020110-902956856001M/Sfalse{"type":"VBB","description":"GFZ:CX1980:Trillium-Compact-120/GIPP/g=754","model":"Trillium Compact","resourceId":"Sensor/20181107193039.533437.549"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:CX1980:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20181107193039.45543.320"}FDSNXML:DataLogger2018-11-05T00:00:00.0000Zxxxx2yyyy2100119002956856001M/Sfalse{"type":"VBB","description":"GFZ:CX1980:Trillium-Compact-120/GIPP/g=754","model":"Trillium Compact","resourceId":"Sensor/20181107193039.533437.549"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:CX1980:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20181107193039.45543.320"}FDSNXML:DataLogger2018-11-05T00:00:00.0000Zxxxx1yyyy110011002956856001M/Sfalse{"type":"VBB","description":"GFZ:CX1980:Trillium-Compact-120/GIPP/g=754","model":"Trillium Compact","resourceId":"Sensor/20181107193039.533437.549"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:CX1980:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20181107193039.45543.320"}FDSNXML:DataLogger diff --git a/etc/inventory/inventory_GE.xml b/etc/inventory/inventory_GE.xml new file mode 100644 index 0000000..2a695d1 --- /dev/null +++ b/etc/inventory/inventory_GE.xml @@ -0,0 +1,2 @@ + +Streckeisen STS-1VBB w/E300M/S{"unit":"Velocity in Meters Per Second"}Streckeisen STS-2.5M/S{"unit":"Velocity in Meters Per Second"}Nanometrics Trillium 240Trillium 240NanometricsM/SNanometrics Trillium 240Trillium 240NanometricsM/SNanometrics Trillium 240Trillium 240NanometricsM/SStreckeisen STS-1VBB w/E300M/S{"unit":"Velocity in Meters Per Second"}Streckeisen STS-2.5M/S{"unit":"Velocity in Meters Per 
Second"}GFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:Trillium-360/GSNv2/g=1998M/SGFZ:GE1993:Trillium-360/GSNv2/g=1998M/SGFZ:GE1993:Trillium-360/GSNv2/g=1998M/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:Trillium-240/g=1200Trillium-240M/SGFZ:GE1993:Trillium-240/g=1200Trillium-240M/SGFZ:GE1993:Trillium-240/g=1200Trillium-240M/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-1/VBB/g=2400STS-1/VBBStreckeisenM/SGFZ:GE1993:STS-1/VBB/g=2400STS-1/VBBStreckeisenM/SGFZ:GE1993:STS-1/VBB/g=2400STS-1/VBBStreckeisenM/SGFZ:GE1993:CMG-3T/100/g=1500CMG-3T/100GuralpM/SGFZ:GE1993:CMG-3T/100/g=1500CMG-3T/100GuralpM/SGFZ:GE1993:CMG-3T/100/g=1500CMG-3T/100GuralpM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:Trillium-240/g=1200Trillium-240M/SGFZ:GE1993:Trillium-240/g=1200Trillium-240M/SGFZ:GE1993:Trillium-240/g=1200Trillium-240M/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:CMG-3ESP/30/NPW/g=2000M/SGFZ:GE1993:CMG-3ESP/30/NPW/g=2000M/SGFZ:GE1993:CMG-3ESP/30/NPW/g=2000M/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SG
FZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:Trillium-240/g=1200Trillium-240M/SGFZ:GE1993:Trillium-240/g=1200Trillium-240M/SGFZ:GE1993:Trillium-240/g=1200Trillium-240M/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=20000STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=20000STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=20000STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:Trillium-240/g=1200Trillium-240M/SGFZ:GE1993:Trillium-240/g=1200Trillium-240M/SGFZ:GE1993:Trillium-240/g=1200Trillium-240M/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/
N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SStreckeisen STS-1VBB w/E300m/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-1VBB w/E300m/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-1VBB w/E300m/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-2 Standard-gainm/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-2 Standard-gainm/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-2 Standard-gainm/s{"unit":"Velocity in Meters Per Second"}Metrozet M2166 VBB seismometerm/s{"unit":"Velocity in Meters Per Second"}Metrozet M2166 VBB seismometerm/s{"unit":"Velocity in Meters Per Second"}Metrozet M2166 VBB seismometerm/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-1VBB w/E300M/S{"unit":"Velocity in Meters Per Second"}T120 post hole, quietM/S{"unit":"Velocity in Meters Per Second"}M/S{"unit":"Velocity in Meters Per Second"}M/S{"unit":"Velocity in Meters Per Second"}M/S{"unit":"Velocity in Meters 
Per Second"}M/S{"unit":"Velocity in Meters Per Second"}M/S{"unit":"Velocity in Meters Per Second"}M/S{"unit":"Velocity in Meters Per Second"}GFZ:GE1993:STS-2.5/g=1500VBBStreckeisenM/SGFZ:GE1993:STS-2.5/g=1500VBBStreckeisenM/SGFZ:GE1993:STS-2.5/g=1500VBBStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SM/S{"unit":"Velocity in Meters Per Second"}M/S{"unit":"Velocity in Meters Per Second"}M/S{"unit":"Velocity in Meters Per Second"}GFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/SGFZ:GE1993:STS-2/N/g=1500STS-2/NStreckeisenM/S16777200ResponseFIR/20220328134006.455525.25516777200ResponseFIR/20220328134006.471008.469CentaurNanometrics1NRL/Nanometrics/CENTAUR.1.20.OFF/3 NRL/Nanometrics/CENTAUR.1.20.OFF/4 NRL/Nanometrics/CENTAUR.1.20.OFF/5 NRL/Nanometrics/CENTAUR.1.20.OFF/6 NRL/Nanometrics/CENTAUR.1.20.OFF/7CentaurNanometrics1NRL/Nanometrics/CENTAUR.1.20.OFF/3 NRL/Nanometrics/CENTAUR.1.20.OFF/4 NRL/Nanometrics/CENTAUR.1.20.OFF/5 NRL/Nanometrics/CENTAUR.1.20.OFF/6 NRL/Nanometrics/CENTAUR.1.20.OFF/7CentaurNanometrics1NRL/Nanometrics/CENTAUR.1.20.OFF/3 NRL/Nanometrics/CENTAUR.1.20.OFF/4 NRL/Nanometrics/CENTAUR.1.20.OFF/5 NRL/Nanometrics/CENTAUR.1.20.OFF/6 NRL/Nanometrics/CENTAUR.1.20.OFF/716777200ResponseFIR/20220328134006.455525.25516777200ResponseFIR/20220328134006.471008.469GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 
ResponseFIR/20190409123644.659895.38GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666782.78GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666782.78GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666782.78GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 
ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 
ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:DM24/g=392157DM24Guralp3921570ResponseFIR/20190409123644.947581.1416 ResponseFIR/20190409123644.947697.1417 ResponseFIR/20190409123644.947697.1417 ResponseFIR/20190409123644.947697.1417 ResponseFIR/20190409123644.947935.1420 ResponseFIR/20190409123644.94811.1421 ResponseFIR/20190409123644.948825.1422 ResponseFIR/20190409123644.949145.1423GFZ:GE1993:DM24/g=392157DM24Guralp3921570ResponseFIR/20190409123644.947581.1416 ResponseFIR/20190409123644.947697.1417 ResponseFIR/20190409123644.947697.1417 ResponseFIR/20190409123644.947697.1417 ResponseFIR/20190409123644.947935.1420 ResponseFIR/20190409123644.94811.1421 ResponseFIR/20190409123644.948825.1422 ResponseFIR/20190409123644.949145.1423GFZ:GE1993:DM24/g=392157DM24Guralp3921570ResponseFIR/20190409123644.947581.1416 ResponseFIR/20190409123644.947697.1417 ResponseFIR/20190409123644.947697.1417 ResponseFIR/20190409123644.947697.1417 ResponseFIR/20190409123644.947935.1420 ResponseFIR/20190409123644.94811.1421 ResponseFIR/20190409123644.948825.1422 ResponseFIR/20190409123644.949145.1423GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 
ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666782.78GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666782.78GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666782.78GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:PS6-HG/g=1000000PS6-SCEarthData10000000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-HG/g=1000000PS6-SCEarthData10000000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-HG/g=1000000PS6-SCEarthData10000000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 
ResponseFIR/20190409123644.659895.38GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR
/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/HR/g=1677720Q330Quanterra16777200ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.6669
44.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.7916777200ResponseFIR/20220328134006.455525.25516777200ResponseFIR/20220328134006.455525.25516777200ResponseFIR/20220328134006.455525.25516777200ResponseFIR/20220328134006.471008.46916777200ResponseFIR/20220328134006.471008.46916777200ResponseFIR/20220328134006.471008.4694194300ResponseFIR/20220328134006.471008.4694194300ResponseFIR/20220328134006.471008.4694194300ResponseFIR/20220328134006.471008.46916777200ResponseFIR/20220328134006.455525.25516777200ResponseFIR/20220328134006.471008.4694000000ResponsePAZ/20210527151050.046942.31ResponseFIR/20210527151050.046994.32 ResponseFIR/20210527151050.0473.33 ResponseFIR/20210527151050.047798.34 ResponseFIR/20210527151050.048026.354000000ResponsePAZ/20210527151050.046942.31ResponseFIR/20210527151050.046994.32 ResponseFIR/20210527151050.0473.33 ResponseFIR/20210527151050.047798.34 ResponseFIR/20210527151050.048026.354000000ResponsePAZ/20210527151050.046942.31ResponseFIR/20210527151050.046994.32 ResponseFIR/20210527151050.0473.33 ResponseFIR/20210527151050.047798.34 ResponseFIR/20210527151050.048026.354000000ResponsePAZ/20210527151050.046942.31ResponseFIR/20210527151050.046994.32 ResponseFIR/20210527151050.0473.33 ResponseFIR/20210527151050.047798.34 ResponseFIR/20210527151050.048026.354000000ResponsePAZ/20210527151050.046942.31ResponseFIR/20210527151050.046994.32 ResponseFIR/20210527151050.0473.33 ResponseFIR/20210527151050.047798.34 ResponseFIR/20210527151050.048026.354000000ResponsePAZ/20210527151050.046942.31ResponseFIR/20210527151050.046994.32 ResponseFIR/20210527151050.0473.33 ResponseFIR/20210527151050.047798.34 ResponseFIR/20210527151050.048026.35GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:Q330/N/g=419430Q330Quanterra4194300ResponseFIR/20190409123644.666944.79GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 
ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.384000000ResponsePAZ/20210527151050.046942.31ResponseFIR/20210527151050.046994.32 ResponseFIR/20210527151050.0473.33 ResponseFIR/20210527151050.047798.34 ResponseFIR/20210527151050.048026.354000000ResponsePAZ/20210527151050.046942.31ResponseFIR/20210527151050.046994.32 ResponseFIR/20210527151050.0473.33 ResponseFIR/20210527151050.047798.34 ResponseFIR/20210527151050.048026.354000000ResponsePAZ/20210527151050.046942.31ResponseFIR/20210527151050.046994.32 ResponseFIR/20210527151050.0473.33 ResponseFIR/20210527151050.047798.34 ResponseFIR/20210527151050.048026.35GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38GFZ:GE1993:PS6-SC/g=392000PS6-SCEarthData3920000ResponseFIR/20190409123644.659389.36 ResponseFIR/20190409123644.659661.37 ResponseFIR/20190409123644.659895.38A24230.053946.580.0546(0,0) (0,0) (-0.0245718,0) (-0.0245718,0)(-0.0138083,0.01234) (-0.0138083,-0.01234) (-0.0278758,0) (-0.0170315,0) (-39.18,49.12) (-39.18,-49.12)A14590.0245.68080.0267(0,0) (0,0) (-9.42478,0) (-628.319,0) (-565.487,979.452) (-565.487,-979.452)(-0.0373158,-0.0367) (-0.0373158,0.0367) (-9.73894,0) (-219.911,138.23) (-219.911,-138.23) (-219.911,684.867) (-219.911,-684.867)A118914.72488e+171611(0,0) (0,0) (-72.5,0) (-159.3,0) (-251,0) (-3270,0)(-0.017699,0.017604) (-0.017699,-0.017604) (-85.3,0) (-155.4,210.8) (-155.4,-210.8) (-713,0) (-1140,0) (-4300,0) (-5800,0) (-4300,4400) (-4300,-4400)A25890.053947.550.0546(0,0) (0,0) (-0.0245063,0) (-0.0245063,0)(-0.0132704,0.01234) (-0.0132704,-0.01234) (-0.0190607,0) (-0.0278488,0) (-39.18,49.12) (-39.18,-49.12)A14940.0245.83980.0267(0,0) (0,0) (-9.42478,0) (-628.319,0) (-565.487,979.452) (-565.487,-979.452)(-0.0376217,-0.0362) (-0.0376217,0.0362) (-9.73894,0) (-219.911,138.23) (-219.911,-138.23) (-219.911,684.867) (-219.911,-684.867)A15000.0260077000125(0,0) (0,0)(-0.037004,0.037016) (-0.037004,-0.037016) (-251.33,0) (-131.04,-467.29) (-131.04,467.29)A1998.412.0863e+161611(0,0) (0,0) (-31.6,0) (-143.4,0) (-350,0) (-3100,0)(-0.012293,0.012315) (-0.012293,-0.012315) (-33.4,0) (-112.9,0) (-137.3,286.6) (-137.3,-286.6) (-1420,900) (-1420,-900) (-2400,0) (-6300,300) (-6300,-300)A12001451700157(0,0) (0,0) (-91.66,0) (-160.1,0) (-3207,0)(-0.0177,0.0176) (-0.0177,-0.0176) (-126.7,0) (-192,259) (-192,-259) (-557.7,1143) (-557.7,-1143)A24000.023948.58124(0,0) (0,0)(-0.01234,0.01234) (-0.01234,-0.01234) (-39.18,49.12) (-39.18,-49.12)A15000.041122(0,0) (0,0)(-0.04442,0.04442) (-0.04442,-0.04442)A20001571508000125(0,0) (0,0)(-0.1486,0.1486) (-0.1486,-0.1486) (-502.65,0) (-1005,0) (-1131,0)A200000.0260077000125(0,0) (0,0)(-0.037004,0.037016) (-0.037004,-0.037016) (-251.33,0) (-131.04,-467.29) (-131.04,467.29)A1922.380.026248.850.0224(0,0) (0,0)(-0.0124509,-0.0115219) (-0.0124509,0.0115219) (-36.4093,-70.0956) (-36.4093,70.0956)100A1947.110.026894.120.0224(0,0) (0,0)(-0.0126727,-0.0112779) (-0.0126727,0.0112779) (-36.0256,-74.7035) (-36.0256,74.7035)100A2457.260.028700.470.0224(0,0) (0,0)(-0.0124712,-0.0114189) (-0.0124712,0.0114189) (-35.2188,-86.2837) (-35.2188,86.2837)100A14590.021.46577e+150.02611(0,0) (0,0) (-9.0419,0) (-238.8,0) 
(-433.009,-485.003) (-433.009,485.003)(-0.0370787,-0.0365365) (-0.0370787,0.0365365) (-9.42974,0) (-184.437,0) (-309.119,0) (-92.2683,-409.334) (-92.2683,409.334) (-458.524,0) (-1087.56,-1046.86) (-1087.56,1046.86) (-13300,0)100A22040.05129793000.05710(0,0) (0,0) (-0.026784,0) (-0.026784,0) (-4.40676,-192.023) (-4.40676,192.023) (-237.507,0)(-0.01234,-0.01234) (-0.01234,0.01234) (-0.021995,0) (-0.0242718,0) (-110.346,0) (-66.2543,-133.311) (-66.2543,133.311) (-4.83282,-192.785) (-4.83282,192.785) (-1252.79,0)100A23870.05129793000.05710(0,0) (0,0) (-0.026784,0) (-0.026784,0) (-4.40676,-192.023) (-4.40676,192.023) (-237.507,0)(-0.01234,-0.01234) (-0.01234,0.01234) (-0.021995,0) (-0.0242718,0) (-110.346,0) (-66.2543,-133.311) (-66.2543,133.311) (-4.83282,-192.785) (-4.83282,192.785) (-1252.79,0)100A24220.05337557000.05710(0,0) (0,0) (-0.0411445,0) (-0.0411445,0) (-137.657,0) (-0.164571,-291.55) (-0.164571,291.55)(-0.0185043,0) (-0.0169916,-0.01234) (-0.0169916,0.01234) (-0.0545579,0) (-82.0991,0) (-58.9459,-79.5451) (-58.9459,79.5451) (-758.566,0) (-17.9713,-803.788) (-17.9713,803.788)100A24040.053949.670.0546(0,0) (0,0) (-0.0238199,0) (-0.0238199,0)(-0.0131766,0.01234) (-0.0131766,-0.01234) (-0.0153551,0) (-0.030809,0) (-39.18,49.12) (-39.18,-49.12)A11770.028.46587e+170.02611(0,0) (0,0) (-31.63,0) (-160,0) (-350,0) (-3177,0)(-0.0368059,0.0362649) (-0.0368059,-0.0362649) (-32.55,0) (-142,0) (-364,404) (-364,-404) (-1260,0) (-4900,-5200) (-4900,5200) (-7100,-1700) (-7100,1700)A0.9925421110010132.6132.6167A-3.65342e-17 3.67488e-08 -4.2706e-07 1.14502e-06 -1.87594e-07 -3.37274e-07 2.78747e-06 -3.74403e-06 5.41172e-06 7.47336e-06 -0.000517759 0.000210677 4.63258e-05 -0.000608222 0.00144175 -0.00240627 0.00322534 -0.00350639 0.00281441 -0.000771971 -0.00280512 0.00777805 -0.0135815 0.0191765 -0.0229704 0.0240398 -0.0220986 0.00860734 0.0117525 -0.0447787 0.0964923 -0.191755 0.527652 0.724167 -0.156905 0.0442574 0.00314168 -0.0266714 0.0361532 -0.0385687 0.0310842 -0.0235259 0.0153211 -0.00740398 0.00109645 0.00309797 -0.0051932 0.00556131 -0.0047611 0.00338213 -0.00192052 0.000715218 7.67719e-05 -0.000451897 0.0005027 -0.000565037 -5.568e-05 1.57736e-05 -1.41985e-06 8.14909e-07 6.80795e-07 -1.25273e-06 1.52435e-06 -2.83336e-07 -1.06384e-08 1.25712e-09 -5.42954e-1110117.218417.218439A4.18952e-13 0.000330318 0.00102921 -0.00314123 0.000205709 0.00152521 -0.00623193 0.0104801 -0.0131202 0.0107821 -0.00144455 -0.0158729 0.0395074 -0.0651036 0.0853716 -0.0891913 0.0500619 0.837233 0.266723 -0.166693 0.095284 -0.0509218 0.0161458 0.00706362 -0.0183877 0.0199414 -0.0154895 0.00852735 -0.00255789 -0.00181103 0.00242649 -0.00375769 0.000467293 0.000633072 -1.56874e-06 -1.2548e-05 3.21041e-07 -2.63324e-08 -5.09997e-0840000001001B1101581.9999981.9999983B-4.047908e-10 -1.390291e-10 6.728001e-10 2.757972e-09 7.546507e-09 1.766681e-08 3.769363e-08 7.523132e-08 1.424254e-07 2.57999e-07 4.499037e-07 7.586555e-07 1.241386e-06 1.97658e-06 3.069389e-06 4.657284e-06 6.915693e-06 1.00631e-05 1.436487e-05 2.013499e-05 2.773459e-05 3.756609e-05 5.006174e-05 6.566517e-05 8.480489e-05 0.0001078587 0.0001351084 0.0001666851 0.0002025053 0.0002421999 0.000285038 0.0003298504 0.000374956 0.0004180986 0.0004564003 0.0004863386 0.0005037566 0.0005039131 0.0004815801 0.0004311946 0.000347068 0.0002236552 5.588177e-05 -0.0001604755 -0.0004283654 -0.0007490084 -0.001121432 -0.001542013 -0.002004062 -0.002497462 -0.003008406 -0.00351925 -0.004008517 -0.00445107 -0.004818468 -0.005079526 -0.005201073 -0.005148892 -0.004888844 
-0.004388128 -0.003616642 -0.002548397 -0.00116293 0.0005533497 0.002605953 0.004991292 0.007695918 0.01069608 0.01395761 0.01743625 0.02107831 0.02482173 0.02859758 0.03233183 0.0359474 0.03936644 0.04251273 0.04531409 0.04770474 0.04962748 0.05103565 0.05189471 0.052183451010939394B8.46923e-10 2.27422e-09 4.538951e-09 6.873791e-09 7.10904e-09 7.627841e-10 -1.98301e-08 -6.609942e-08 -1.531138e-07 -2.982495e-07 -5.181519e-07 -8.236356e-07 -1.212476e-06 -1.660555e-06 -2.112474e-06 -2.473531e-06 -2.605619e-06 -2.330134e-06 -1.44098e-06 2.698043e-07 2.972714e-06 6.748858e-06 1.152962e-05 1.703543e-05 2.272479e-05 2.776698e-05 3.105368e-05 3.126302e-05 2.698613e-05 1.691886e-05 1.111765e-07 -2.374559e-05 -5.402672e-05 -8.892268e-05 -0.0001252819 -0.0001586044 -0.0001832359 -0.000192798 -0.0001808636 -0.0001418502 -7.206699e-05 2.919376e-05 0.0001586655 0.0003083858 0.0004654217 0.0006121983 0.0007275471 0.0007885116 0.0007728578 0.000662123 0.000444926 0.0001201614 -0.0003003695 -0.0007903605 -0.001308874 -0.00180202 -0.002206618 -0.00245579 -0.002486192 -0.002246312 -0.001705031 -0.0008594633 0.0002590112 0.001581566 0.003002498 0.004384142 0.005566316 0.006380002 0.006664402 0.006286043 0.005158141 0.003258199 0.0006416821 -0.002550179 -0.006090065 -0.009672703 -0.01293168 -0.01546425 -0.01686256 -0.01674898 -0.01481246 -0.01084255 -0.004757783 0.003374886 0.01333183 0.02473738 0.03708165 0.0497507 0.06206696 0.07333685 0.08290152 0.0901866 0.09474635 0.09629848105565657B3.624153e-09 1.470217e-08 2.924648e-08 1.675816e-08 -9.459693e-08 -4.005292e-07 -9.373652e-07 -1.530435e-06 -1.645991e-06 -4.078704e-07 3.030094e-06 8.685992e-06 1.476603e-05 1.713675e-05 1.012814e-05 -1.072856e-05 -4.439651e-05 -8.029797e-05 -9.761762e-05 -7.094869e-05 1.692091e-05 0.0001585078 0.0003094936 0.0003912802 0.0003155885 2.919058e-05 -0.0004362724 -0.0009354174 -0.001228394 -0.001061469 -0.0002939815 0.0009756478 0.002352455 0.003215585 0.002933434 0.001171051 -0.001829548 -0.005145646 -0.007361208 -0.007056359 -0.003467655 0.002935113 0.01024857 0.01551828 0.01570616 0.008987865 -0.004081172 -0.0200454 -0.03303498 -0.03628456 -0.0243088 0.004958794 0.04884525 0.1000148 0.1480753 0.1823112 0.1947111102111111112B-2.487704e-10 4.73744e-09 1.240319e-08 2.18423e-09 -2.973504e-08 -2.774098e-08 4.823501e-08 9.04852e-08 -4.377202e-08 -2.029251e-07 -2.93251e-08 3.576771e-07 2.380756e-07 -5.050541e-07 -6.566606e-07 5.295136e-07 1.333027e-06 -2.353909e-07 -2.234558e-06 -6.431887e-07 3.178848e-06 2.391894e-06 -3.766426e-06 -5.213131e-06 3.343879e-06 9.069842e-06 -1.034203e-06 -1.349476e-05 -4.128203e-06 1.740814e-05 1.293247e-05 -1.901116e-05 -2.55982e-05 1.583847e-05 4.129854e-05 -5.051362e-06 -5.769434e-05 -1.597343e-05 7.06232e-05 4.876212e-05 -7.411825e-05 -9.265601e-05 6.092806e-05 0.0001437042 -2.365713e-05 -0.0001938177 -4.346068e-05 0.0002305286 0.0001422997 -0.0002376966 -0.0002684195 0.00019742 0.0004089862 -9.322642e-05 -0.0005416667 -8.56506e-05 0.0006351459 0.0003394235 -0.0006517938 -0.0006531623 0.000552734 0.0009935334 -0.000305133 -0.00130802 -0.0001089867 0.001527642 0.0006836461 -0.001573808 -0.00138119 0.001369316 0.002127841 -0.0008527391 -0.002814326 -5.435981e-06 0.003302926 0.001187235 -0.003441493 -0.002615018 0.003083861 0.004145541 -0.002114933 -0.005572669 0.0004775432 0.006640171 0.001802538 -0.00706464 -0.004597381 0.00656694 0.007666904 -0.00490895 -0.01066029 0.001931064 0.01312888 0.002414871 -0.01454758 -0.008042475 0.01433728 0.01472206 -0.01187359 -0.02208895 0.00645106 0.0296694 
0.002871719 -0.03692219 -0.01777635 0.04329151 0.04248361 -0.04826485 -0.09280869 0.05142858 0.3137774 0.447485810600180B2.67082e-08 8.32285e-08 1.94385e-07 3.76255e-07 6.24404e-07 8.88492e-07 1.03745e-06 8.17819e-07 -1.88601e-07 -2.59832e-06 -7.23828e-06 -1.51226e-05 -2.73811e-05 -4.51315e-05 -6.92953e-05 -0.000100369 -0.000138178 -0.000181643 -0.000228621 -0.000275855 -0.000319095 -0.000353401 -0.000373644 -0.000375171 -0.000354555 -0.000310343 -0.000243653 -0.000158518 -6.18352e-05 3.71254e-05 0.000127672 0.000199021 0.000241849 0.000249853 0.000221049 0.000158554 7.06486e-05 -3.0002e-05 -0.000127937 -0.000207141 -0.000253616 -0.00025789 -0.000217004 -0.000135557 -2.5537e-05 9.51778e-05 0.000205518 0.000284743 0.000316236 0.000290878 0.000209355 8.28454e-05 -6.81948e-05 -0.000217095 -0.000335486 -0.000398536 -0.000389979 -0.000305938 -0.00015671 3.40371e-05 0.000232714 0.000401493 0.000505302 0.000518878 0.000432514 0.000255237 1.45787e-05 -0.000247311 -0.000480763 -0.00063776 -0.000681539 -0.000594766 -0.000384515 -8.28166e-05 0.000257718 0.000572714 0.000797911 0.000881948 0.000797604 0.000549081 0.000173583 -0.000263593 -0.000679974 -0.000991109 -0.00112732 -0.00104878 -0.000755694 -0.000291221 0.000264018 0.000805406 0.0012236 0.00142618 0.00135744 0.00101227 0.000440728 -0.000258079 -0.000952548 -0.00150282 -0.00178842 -0.00173392 -0.00132727 -0.000626641 0.000246574 0.00112792 0.00184023 0.00222856 0.00219308 0.00171292 0.000855763 -0.00022999 -0.00133986 -0.00225101 -0.00276614 -0.00275516 -0.00218605 -0.00113772 0.000208611 0.00159948 0.00275586 0.00342803 0.00344823 0.00276999 0.00148557 -0.000183733 -0.00192399 -0.0033863 -0.00425529 -0.00431537 -0.00350103 -0.00192009 0.000156708 0.00233976 0.00419176 0.00531324 0.00542617 0.00443862 0.0024761 -0.000128937 -0.00289021 -0.00525541 -0.00671354 -0.00690136 -0.00568759 -0.00321628 0.000102091 0.00365383 0.00673075 0.00866603 0.00897195 0.00745242 0.0042659 -7.74563e-05 -0.004789 -0.00893601 -0.011616 -0.0121398 -0.0101886 -0.00591185 5.65209e-05 0.00667551 0.0126597 0.0167045 0.0177381 0.0151536 0.00898105 -4.09115e-05 -0.0105112 -0.0205336 -0.0279844 -0.0308424 -0.0275293 -0.0172056 3.13298e-05 0.0230828 0.0499361 0.0778964 0.103932 0.125084 0.138876 0.14366510500160B7.75144e-09 4.53772e-09 -5.3372e-08 -2.80899e-07 -9.11346e-07 -2.35194e-06 -5.23072e-06 -1.04113e-05 -1.89475e-05 -3.19532e-05 -5.0373e-05 -7.4666e-05 -0.000104445 -0.000138147 -0.000172838 -0.000204266 -0.000227231 -0.000236326 -0.000226965 -0.000196559 -0.000145589 -7.82785e-05 -2.59959e-06 7.05712e-05 0.000129149 0.00016217 0.000162334 0.000128167 6.52036e-05 -1.42938e-05 -9.3267e-05 -0.000153254 -0.000178617 -0.000160658 -0.000100551 -1.01526e-05 8.98321e-05 0.000174208 0.000219365 0.00020968 0.000142552 3.05356e-05 -0.000100376 -0.000216344 -0.000284081 -0.00028007 -0.000198144 -5.31045e-05 0.000121116 0.00027886 0.000374288 0.000374268 0.000269098 7.77583e-05 -0.000154417 -0.000365837 -0.000494488 -0.000495582 -0.000356151 -0.000102039 0.000205535 0.000483951 0.000650779 0.000647523 0.000458852 0.000121284 -0.000282282 -0.000642014 -0.000850322 -0.000833217 -0.000574947 -0.000128043 0.000395526 0.000851361 0.00110159 0.00105558 0.000700551 0.000112345 -0.000558753 -0.00112519 -0.00141357 -0.00131641 -0.000829312 -6.10946e-05 0.000788264 0.00147829 0.00179487 0.00161507 0.000950752 -4.37621e-05 -0.00110503 -0.00192868 -0.0022552 -0.00194959 -0.00105099 0.000224779 0.00153433 0.00249725 0.00280468 0.00231561 0.00111142 -0.000510507 -0.00210718 -0.00320865 
-0.00345387 -0.00270515 -0.00110589 0.000939782 0.00286553 0.00409665 0.0042182 0.00310923 0.00100105 -0.00156353 -0.00386551 -0.00520724 -0.0051191 -0.0035152 -0.00074874 0.00245722 0.00519287 0.00661396 0.00619507 0.00390959 0.000278111 -0.00373934 -0.00698863 -0.00844379 -0.00751883 -0.0042774 0.000529093 0.00562055 0.00951518 0.0109444 0.00924347 0.00460376 -0.00189947 -0.00853129 -0.0133335 -0.0146694 -0.0117328 -0.00487461 0.00436211 0.0135399 0.0199084 0.0211419 0.0160512 0.00507765 -0.00958023 -0.0242775 -0.0346331 -0.0364985 -0.02697 -0.00520347 0.0271753 0.0661873 0.106146 0.140714 0.164141 0.1724271050080C4.032461e-05 7.45328e-05 0.0001234553 0.0001701887 0.0001973105 0.0001854891 0.0001193456 -5.723101e-06 -0.0001779232 -0.0003673259 -0.0005295104 -0.0006150085 -0.0005832354 -0.0004172837 -0.0001349516 0.000208333 0.000527709 0.0007281899 0.0007312587 0.0005019202 6.783176e-05 -0.0004771493 -0.000989158 -0.001308918 -0.001307358 -0.0009300168 -0.0002262541 0.0006483476 0.001461708 0.001963222 0.001956625 0.001367725 0.0002854628 -0.001040387 -0.002250679 -0.002969069 -0.002912737 -0.001990583 -0.0003573537 0.00159884 0.003340972 0.004323764 0.004155636 0.002736002 0.000323431 -0.002494752 -0.004934943 -0.006225197 -0.005836136 -0.003668966 -0.0001394092 0.003880228 0.007261232 0.008919356 0.008140252 0.00483705 -0.0003434785 -0.006115665 -0.01084778 -0.01299272 -0.01154995 -0.006430376 0.001391199 0.01000571 0.01698057 0.0199734 0.01740665 0.009029463 -0.003794969 -0.01818304 -0.03022295 -0.03578333 -0.03146898 -0.01550444 0.01167237 0.04726833 0.08650819 0.1234668 0.1521942 0.167893910132.6092432.6092467A-3.653417e-17 3.674881e-08 -4.270596e-07 1.14502e-06 -1.875941e-07 -3.372737e-07 2.787469e-06 -3.744026e-06 5.411719e-06 7.473363e-06 -0.0005177595 0.0002106768 4.632577e-05 -0.0006082222 0.001441747 -0.002406265 0.003225338 -0.00350639 0.002814411 -0.0007719714 -0.002805119 0.007778055 -0.01358146 0.01917646 -0.02297035 0.02403979 -0.02209865 0.008607339 0.01175252 -0.04477868 0.09649231 -0.1917548 0.5276523 0.724167 -0.1569053 0.04425742 0.003141684 -0.02667144 0.03615316 -0.03856867 0.03108417 -0.02352589 0.01532109 -0.007403983 0.001096454 0.003097965 -0.005193199 0.005561311 -0.004761101 0.003382132 -0.00192052 0.0007152175 7.677194e-05 -0.0004518973 0.0005026997 -0.000565037 -5.568005e-05 1.577356e-05 -1.419847e-06 8.149094e-07 6.807946e-07 -1.252728e-06 1.52435e-06 -2.833359e-07 -1.063838e-08 1.25712e-09 -5.429542e-1110117.2184817.2184839A4.189518e-13 0.0003303176 0.001029213 -0.003141228 0.0002057093 0.001525213 -0.006231927 0.01048013 -0.01312025 0.01078214 -0.00144455 -0.01587295 0.0395074 -0.06510363 0.08537156 -0.08919134 0.05006189 0.8372328 0.2667231 -0.1666931 0.09528399 -0.05092177 0.01614584 0.007063624 -0.01838771 0.01994141 -0.01548951 0.008527354 -0.002557887 -0.001811026 0.002426493 -0.003757695 0.0004672927 0.0006330721 -1.568741e-06 -1.254798e-05 3.210405e-07 -2.633241e-08 -5.099975e-081080015B0.000244 0.000977 0.002441 0.004883 0.008545 0.013672 0.020508 0.029297 0.039307 0.049805 0.060059 0.069336 0.076904 0.082031 0.083984102007B0.000244 0.00293 0.016112 0.053711 0.12085 0.193359 0.22558610200101A-1e-06 -6e-06 -3e-06 -4.3e-05 -5e-06 1e-06 2.3e-05 1e-06 -5.3e-05 -0.000367 0.000376 0.000854 3.1e-05 -0.001276 -0.000911 0.001277 0.002152 -0.000462 -0.003338 -0.001409 0.003771 0.004194 -0.002643 -0.007201 -0.000644 0.009184 0.006084 -0.008578 -0.01274 0.003982 0.018626 0.005205 -0.020941 -0.018163 0.016667 0.032245 -0.003466 -0.042953 -0.019326 0.044309 
0.049791 -0.029416 -0.082608 -0.009342 0.107552 0.08166 -0.10311 -0.204208 -3.1e-05 0.390433 0.589958 0.390433 -3.1e-05 -0.204208 -0.10311 0.08166 0.107552 -0.009342 -0.082608 -0.029416 0.049791 0.044309 -0.019326 -0.042953 -0.003466 0.032245 0.016667 -0.018163 -0.020941 0.005205 0.018626 0.003982 -0.01274 -0.008578 0.006084 0.009184 -0.000644 -0.007201 -0.002643 0.004194 0.003771 -0.001409 -0.003338 -0.000462 0.002152 0.001277 -0.000911 -0.001276 3.1e-05 0.000854 0.000376 -0.000367 -0.00041 2.5e-05 0.000262 0.000121 -0.0001 -0.000162 -9.8e-05 -2.9e-05 -3e-06101000596B-1.4901161e-08 -2.9802322e-08 -4.4703484e-08 -5.9604645e-08 -8.9406967e-08 -8.9406967e-08 -8.9406967e-08 -8.9406967e-08 -5.9604645e-08 0 5.9604645e-08 1.3411045e-07 2.2351742e-07 2.9802322e-07 3.7252903e-07 4.1723251e-07 4.3213367e-07 4.0233135e-07 3.2782555e-07 1.937151e-07 2.9802322e-08 -1.7881393e-07 -3.8743019e-07 -6.1094761e-07 -8.046627e-07 -9.5367432e-07 -1.0430813e-06 -1.0430813e-06 -9.5367432e-07 -7.4505806e-07 -4.61936e-07 -8.9406967e-08 3.2782555e-07 7.7486038e-07 1.2069941e-06 1.5795231e-06 1.847744e-06 1.9967556e-06 1.9669533e-06 1.7732382e-06 1.4007092e-06 8.6426735e-07 2.0861626e-07 -5.364418e-07 -1.3113022e-06 -2.0265579e-06 -2.6375055e-06 -3.0845404e-06 -3.2931566e-06 -3.2484531e-06 -2.9206276e-06 -2.30968e-06 -1.4454126e-06 -3.8743019e-07 7.7486038e-07 1.9669533e-06 3.0845404e-06 4.0233135e-06 4.6938658e-06 5.0216913e-06 4.9322844e-06 4.4256449e-06 3.516674e-06 2.2351742e-06 6.8545342e-07 -1.0281801e-06 -2.771616e-06 -4.4107437e-06 -5.7667494e-06 -6.7353249e-06 -7.1972609e-06 -7.0929527e-06 -6.377697e-06 -5.081296e-06 -3.2931566e-06 -1.1026859e-06 1.296401e-06 3.7401915e-06 6.005168e-06 7.8976154e-06 9.2536211e-06 9.9092722e-06 9.7751617e-06 8.8065863e-06 7.0631504e-06 4.6342611e-06 1.6987324e-06 -1.5497208e-06 -4.8428774e-06 -7.9125166e-06 -1.0475516e-05 -1.2308359e-05 -1.3202429e-05 -1.3053417e-05 -1.1816621e-05 -9.5367432e-06 -6.3478947e-06 -2.4735928e-06 1.7881393e-06 6.1094761e-06 1.0147691e-05 1.3530254e-05 1.5959144e-05 1.7181039e-05 1.7032027e-05 1.5482306e-05 1.2561679e-05 8.4936619e-06 3.516674e-06 -1.9669533e-06 -7.5399876e-06 -1.2740493e-05 -1.7121434e-05 -2.028048e-05 -2.1904707e-05 -2.1800399e-05 -1.9878149e-05 -1.6242266e-05 -1.1116266e-05 -4.8577785e-06 2.0712614e-06 9.1195107e-06 1.5720725e-05 2.1293759e-05 2.5346875e-05 2.7477741e-05 2.7433038e-05 2.5138259e-05 2.0682812e-05 1.4334917e-05 6.5565109e-06 -2.0712614e-06 -1.0862947e-05 -1.911819e-05 -2.6121736e-05 -3.1247735e-05 -3.400445e-05 -3.4078956e-05 -3.1366944e-05 -2.5957823e-05 -1.8209219e-05 -8.687377e-06 1.9222498e-06 1.2755394e-05 2.2962689e-05 3.1650066e-05 3.8072467e-05 4.1604042e-05 4.1857362e-05 3.8683414e-05 3.2216311e-05 2.2873282e-05 1.1324883e-05 -1.5646219e-06 -1.4796853e-05 -2.7269125e-05 -3.7968159e-05 -4.5910478e-05 -5.0380826e-05 -5.0872564e-05 -4.722178e-05 -3.9577484e-05 -2.8416514e-05 -1.4558434e-05 9.8347664e-07 1.6957521e-05 3.2097101e-05 4.5105815e-05 5.4866076e-05 6.0468912e-05 6.1303377e-05 5.7145953e-05 4.8190355e-05 3.4973025e-05 1.847744e-05 -7.4505806e-08 -1.9237399e-05 -3.7431717e-05 -5.3167343e-05 -6.505847e-05 -7.2002411e-05 -7.3283911e-05 -6.8619847e-05 -5.8203936e-05 -4.2691827e-05 -2.3201108e-05 -1.1920929e-06 2.1591783e-05 4.3332577e-05 6.2197447e-05 7.6577067e-05 8.5130334e-05 8.700788e-05 8.1822276e-05 6.9797039e-05 5.1707029e-05 2.8848648e-05 2.9355288e-06 -2.4005771e-05 -4.978478e-05 -7.2285533e-05 -8.9541078e-05 -0.00010000169 -0.0001026094 -9.6932054e-05 -8.3163381e-05 -6.2197447e-05 
-3.5539269e-05 -5.2154064e-06 2.643466e-05 5.6833029e-05 8.3476305e-05 0.00010408461 0.0001167804 0.00012032688 0.00011414289 9.8481774e-05 7.4341893e-05 4.3451786e-05 8.136034e-06 -2.8833747e-05 -6.4492226e-05 -9.5874071e-05 -0.00012032688 -0.00013564527 -0.00014033914 -0.00013370812 -0.00011599064 -8.8334084e-05 -5.2720308e-05 -1.1831522e-05 3.1143427e-05 7.2747469e-05 0.00010953844 0.00013840199 0.00015677512 0.00016285479 0.00015582144 0.00013591349 0.00010439754 6.352365e-05 1.6406178e-05 -3.3304095e-05 -8.161366e-05 -0.00012454391 -0.00015844405 -0.00018034875 -0.00018811226 -0.00018076599 -0.00015850365 -0.00012275577 -7.6085329e-05 -2.2009015e-05 3.5241246e-05 9.1090798e-05 0.00014095008 0.00018060207 0.0002065599 0.00021636486 0.00020879507 0.00018404424 0.000143677 9.059906e-05 2.8818846e-05 -3.6850572e-05 -0.00010116398 -0.00015883148 -0.00020501018 -0.00023564696 -0.00024786592 -0.00024020672 -0.00021281838 -0.00016742945 -0.00010730326 -3.6984682e-05 3.8057566e-05 0.00011181831 0.00017827749 0.00023183227 0.00026783347 0.00028289855 0.00027531385 0.000245139 0.00019431114 0.00012646616 4.671514e-05 -3.8728118e-05 -0.00012303889 -0.00019934773 -0.00026124716 -0.00030334294 -0.00032176077 -0.0003144443 -0.00028139353 -0.00022467971 -0.00014837086 -5.8233738e-05 3.8743019e-05 0.00013481081 0.00022211671 0.00029340386 0.00034245849 0.00036479533 0.0003579855 0.00032192469 0.00025886297 0.00017333031 7.1749091e-05 -3.7953258e-05 -0.00014705956 -0.00024668872 -0.0003284961 -0.00038544834 -0.00041235983 -0.00040635467 -0.00036717951 -0.00029730797 -0.00020170212 -8.7574124e-05 3.6194921e-05 0.00015978515 0.00027310848 0.00036674738 0.00043265522 0.00046485662 0.00045998394 0.00041763484 0.00034043193 0.00023387372 0.00010599196 -3.3304095e-05 -0.00017289817 -0.00030149519 -0.00040838122 -0.00048440695 -0.00052271783 -0.00051939487 -0.00047381222 -0.00038875639 -0.00027029216 -0.00012733042 2.9042363e-05 0.00018638372 0.00033195317 0.00045366585 0.00054112077 0.0005864799 0.0005851835 0.00053630769 0.00044286251 0.00031146407 0.00015199184 -2.3186207e-05 -0.0002001673 -0.00036460161 -0.00050289929 -0.00060325861 -0.00065672398 -0.00065797567 -0.00060583651 -0.00050340593 -0.00035795569 -0.00018042326 1.5467405e-05 0.00021415949 0.00039958954 0.00055642426 0.00067132711 0.00073410571 0.00073857605 0.00068315864 0.00057114661 0.00041045249 0.00021311641 -5.5730343e-06 -0.00022831559 -0.00043709576 -0.0006146729 -0.00074599683 -0.00081944466 -0.00082786381 -0.00076925755 -0.00064700842 -0.0004697293 -0.00025068223 -6.839633e-06 0.0002425462 0.0004773289 0.00067815185 0.00082799792 0.0009136945 0.00092695653 0.00086525083 0.00073204935 0.00053671002 0.000293836 2.2187829e-05 -0.00025677681 -0.00052057207 -0.00074745715 -0.00091828406 -0.0010180175 -0.0010371506 -0.00097249448 -0.00082756579 -0.00061254203 -0.00034342706 -4.0978193e-05 0.00027091801 0.000567168 0.00082337856 0.0010179579 0.0011338592 0.0011600703 0.0010926574 0.00093518198 0.00069859624 0.00040048361 6.3806772e-05 -0.0002848953 -0.00061756372 -0.00090688467 -0.0011284947 -0.0012630075 -0.0012977421 -0.0012278557 -0.00105685 -0.00079657137 -0.00046630204 -9.1403723e-05 0.00029858947 0.00067235529 0.00099922717 0.0012516975 0.0014077574 0.001452744 0.0013807267 0.0011951327 0.00090865791 0.00054249167 0.00012467802 -0.00031194091 -0.00073233247 -0.0011020154 -0.0013899505 -0.0015710592 -0.0016283989 -0.0015547276 -0.0013532639 -0.0010376722 -0.00063115358 -0.00016479194 0.00032484531 0.00079850852 0.0012174249 0.0015464127 
0.0017568469 0.0018291175 0.0017544031 0.0015355796 0.0011873096 0.00073504448 0.00021326542 -0.00033722818 -0.00087235868 -0.0013483465 -0.0017252862 -0.0019703805 -0.0020608306 -0.0019858629 -0.0017478764 -0.0013625622 -0.00085788965 -0.00027212501 0.0003489852 0.00095583498 0.0014988333 0.0019324422 0.0022189617 0.0023317486 0.0022576153 0.0019982457 0.0015703887 0.0010048151 0.00034421682 -0.00036002696 -0.0010517836 -0.0016745776 -0.0021761805 -0.0025129467 -0.0026535392 -0.0025817603 -0.0022982061 -0.0018207133 -0.0011832267 -0.00043353438 0.00037029386 0.0011643618 0.001883924 0.0024686307 0.0028675199 0.0030433834 0.0029761195 0.0026648045 0.0021282732 0.0014040917 0.00054608285 -0.00037971139 -0.0012999028 -0.0021395683 -0.0028283 -0.0033058971 -0.0035276115 -0.0034681857 -0.0031243712 -0.0025158972 -0.0016844869 -0.00069116056 0.00038819015 0.0014684051 0.0024617314 0.0032847822 0.0038653761 0.0041487515 0.0041024983 0.0037198365 0.0030210167 0.0020525306 0.0008842051 -0.00039567053 -0.0016867965 -0.0028845817 -0.0038884133 -0.0046098083 -0.0049800575 -0.0049563646 -0.0045262277 -0.00370951 -0.0025580525 -0.0011527091 0.00040210783 0.001985833 0.0034708232 0.0047322661 0.0056582838 0.0061594099 0.0061766505 0.0056874156 0.0047089309 0.0032985955 0.0015510768 -0.00040744245 -0.0024278462 -0.0043489337 -0.0060094744 -0.0072610825 -0.0079804361 -0.0080803782 -0.0075185001 -0.006302923 -0.0044943094 -0.0022040308 0.00041162968 0.0031610429 0.0058299601 0.0081966519 0.010048419 0.011198416 0.011501536 0.010868102 0.0092744082 0.0067691207 0.0034749806 -0.00041465461 -0.0046443045 -0.0089096129 -0.012874499 -0.016191542 -0.018523946 -0.019567996 -0.019074455 -0.01686731 -0.012858912 -0.0070598722 0.00041647255 0.009356007 0.019451886 0.030318514 0.041510671 0.052546695 0.062934399 0.072198108 0.079905152 0.085690379 0.089276925 0.0904920110200248B-5.9604645e-08 0 5.9604645e-08 1.1920929e-07 -1.1920929e-07 -2.3841858e-07 1.1920929e-07 4.1723251e-07 0 -5.9604645e-07 -2.3841858e-07 7.1525574e-07 5.9604645e-07 -7.1525574e-07 -1.013279e-06 5.9604645e-07 1.5497208e-06 -1.7881393e-07 -2.0265579e-06 -4.7683716e-07 2.3245811e-06 1.4305115e-06 -2.3245811e-06 -2.5629997e-06 1.847744e-06 3.695488e-06 -9.5367432e-07 -4.7087669e-06 -5.364418e-07 5.364418e-06 2.4437904e-06 -5.4240227e-06 -4.7683716e-06 4.6491623e-06 7.0929527e-06 -2.9802322e-06 -9.2387199e-06 2.9802322e-07 1.0669231e-05 3.2782555e-06 -1.1086464e-05 -7.5101852e-06 1.013279e-05 1.1920929e-05 -7.5101852e-06 -1.6033649e-05 3.0994415e-06 1.9133091e-05 2.8610229e-06 -2.0623207e-05 -1.001358e-05 1.9848347e-05 1.7821789e-05 -1.6391277e-05 -2.5391579e-05 9.9539757e-06 3.1590462e-05 -7.1525574e-07 -3.5464764e-05 -1.0788441e-05 3.6001205e-05 2.3722649e-05 -3.2246113e-05 -3.6776066e-05 2.3782253e-05 4.8458576e-05 -1.0669231e-05 -5.7041645e-05 -6.6757202e-06 6.0796738e-05 2.6941299e-05 -5.8352947e-05 -4.8398972e-05 4.8696995e-05 6.878376e-05 -3.1471252e-05 -8.559227e-05 7.212162e-06 9.6201897e-05 2.270937e-05 -9.8049641e-05 -5.6028366e-05 8.9466572e-05 8.9585781e-05 -6.9320202e-05 -0.00011974573 3.772974e-05 0.0001424551 3.8743019e-06 -0.00015383959 -5.2630901e-05 0.00015074015 0.00010460615 -0.0001308918 -0.00015461445 9.3519688e-05 0.00019699335 -3.9696693e-05 -0.00022578239 -2.7596951e-05 0.00023567677 0.00010335445 -0.00022250414 -0.00018101931 0.00018393993 0.00025254488 -0.00011992455 -0.00030958652 3.3140182e-05 0.00034373999 7.0869923e-05 -0.00034797192 -0.00018399954 0.00031727552 0.00029581785 -0.00024944544 -0.00039464235 0.00014597178 
0.0004684329 -1.2159348e-05 -0.00050592422 -0.00014269352 0.00049799681 0.00030595064 -0.0004389286 -0.00046211481 0.00032758713 0.00059455633 -0.00016772747 -0.0006865263 -3.1232834e-05 0.00072330236 0.00025475025 -0.00069361925 -0.0004837513 0.0005915761 0.00069624186 -0.00041753054 -0.00086897612 0.00017940998 0.00097954273 0.0001077652 -0.001009047 -0.00042194128 0.00094413757 0.00073581934 -0.00077921152 -0.0010185838 0.00051772594 0.0012387037 -0.00017309189 -0.0013669133 -0.00023144484 0.0013794303 0.00066399574 -0.0012609363 -0.0010861158 0.0010068417 0.0014558434 -0.00062537193 -0.0017312169 0.00013786554 0.0018745065 0.00042140484 -0.0018563271 -0.0010075569 0.0016592741 0.0015676618 -0.0012812018 -0.0020453334 0.00073683262 0.0023854971 -5.8472157e-05 -0.0025398135 -0.00070518255 0.0024721622 0.0014918447 -0.0021630526 -0.0022298694 0.0016136765 0.0028441548 -0.00084781647 -0.003262639 -8.7857246e-05 0.0034232736 0.0011253953 -0.0032808781 -0.0021798015 0.0028132796 0.0031546354 -0.0020257235 -0.0039498806 0.00095337629 0.0044702291 0.00033837557 -0.0046343803 -0.0017566085 0.0043838024 0.0031859279 -0.0036905408 -0.0044961572 0.0025627613 0.0055519938 -0.0010485053 -0.0062241554 -0.00076419115 0.0064008236 0.0027500987 -0.0059993267 -0.0047528148 0.0049758554 0.0065940022 -0.0033336878 -0.0080848336 0.0011284351 0.0090395808 0.0015304685 -0.0092895031 -0.0044813156 0.0086966157 0.0075156093 -0.0071663857 -0.010386407 0.0046575069 0.012818873 -0.0011888742 -0.014521122 -0.0031578541 0.015194237 0.0082397461 -0.014536679 -0.013858736 0.012238562 0.019771576 -0.007954061 -0.025703967 0.0012265444 0.031366527 0.0087025166 -0.036472917 -0.0234254 0.040758014 0.047181845 -0.043994844 -0.095913231 0.046009839 0.31486189 0.4533060810500591B2.9802322e-08 2.9802322e-08 2.9802322e-08 0 -2.9802322e-08 -5.9604645e-08 -5.9604645e-08 -5.9604645e-08 -2.9802322e-08 0 5.9604645e-08 1.1920929e-07 1.1920929e-07 1.1920929e-07 2.9802322e-08 -5.9604645e-08 -1.4901161e-07 -2.0861626e-07 -2.0861626e-07 -1.1920929e-07 0 1.4901161e-07 2.682209e-07 2.9802322e-07 2.3841858e-07 8.9406967e-08 -8.9406967e-08 -2.9802322e-07 -4.1723251e-07 -4.1723251e-07 -2.682209e-07 -2.9802322e-08 2.682209e-07 4.7683716e-07 5.6624412e-07 4.7683716e-07 2.0861626e-07 -1.1920929e-07 -4.7683716e-07 -6.8545342e-07 -7.1525574e-07 -4.7683716e-07 -8.9406967e-08 3.5762787e-07 7.4505806e-07 9.2387199e-07 8.046627e-07 4.1723251e-07 -1.1920929e-07 -6.8545342e-07 -1.0430813e-06 -1.1026859e-06 -8.3446503e-07 -2.3841858e-07 4.4703484e-07 1.0430813e-06 1.3709068e-06 1.2516975e-06 7.1525574e-07 -5.9604645e-08 -8.9406967e-07 -1.4901161e-06 -1.6391277e-06 -1.2814999e-06 -4.7683716e-07 5.0663948e-07 1.4007092e-06 1.9073486e-06 1.8179417e-06 1.1622906e-06 8.9406967e-08 -1.0728836e-06 -1.9669533e-06 -2.2947788e-06 -1.9073486e-06 -8.6426735e-07 4.7683716e-07 1.758337e-06 2.5629997e-06 2.5629997e-06 1.7881393e-06 3.5762787e-07 -1.2218952e-06 -2.5331974e-06 -3.0994415e-06 -2.7120113e-06 -1.4305115e-06 3.5762787e-07 2.1159649e-06 3.3080578e-06 3.516674e-06 2.592802e-06 8.3446503e-07 -1.3113022e-06 -3.1292439e-06 -4.0531158e-06 -3.7550926e-06 -2.2351742e-06 5.9604645e-08 2.4735928e-06 4.2021275e-06 4.6789646e-06 3.695488e-06 1.5199184e-06 -1.2516975e-06 -3.7550926e-06 -5.2154064e-06 -5.0663948e-06 -3.3080578e-06 -4.4703484e-07 2.7120113e-06 5.1558018e-06 6.0796738e-06 5.1259995e-06 2.5033951e-06 -1.013279e-06 -4.4107437e-06 -6.5565109e-06 -6.7055225e-06 -4.7683716e-06 -1.2218952e-06 2.8610229e-06 6.2286854e-06 7.7784061e-06 6.9439411e-06 3.8743019e-06 
-5.364418e-07 -5.0067902e-06 -8.1062317e-06 -8.7618828e-06 -6.6757202e-06 -2.3841858e-06 2.8312206e-06 7.390976e-06 9.8347664e-06 9.2685223e-06 5.7518482e-06 2.682209e-07 -5.543232e-06 -9.8645687e-06 -1.1235476e-05 -9.1195107e-06 -4.0531158e-06 2.5629997e-06 8.6128712e-06 1.2218952e-05 1.2129545e-05 8.1956387e-06 1.4901161e-06 -5.9604645e-06 -1.1861324e-05 -1.4275312e-05 -1.2248755e-05 -6.2584877e-06 1.937151e-06 9.894371e-06 1.5050173e-05 1.5676022e-05 1.1354685e-05 3.2484531e-06 -6.2286854e-06 -1.4126301e-05 -1.7881393e-05 -1.6152859e-05 -9.2387199e-06 9.2387199e-07 1.1205673e-05 1.8358231e-05 1.9997358e-05 1.5377998e-05 5.6922436e-06 -6.1988831e-06 -1.6629696e-05 -2.2232533e-05 -2.1010637e-05 -1.3053417e-05 -6.5565109e-07 1.2457371e-05 2.2172928e-05 2.5272369e-05 2.0444393e-05 8.9406967e-06 -5.8412552e-06 -1.9401312e-05 -2.7358532e-05 -2.6941299e-05 -1.79708e-05 -2.9206276e-06 1.3679266e-05 2.6583672e-05 3.1590462e-05 2.6732683e-05 1.3202429e-05 -5.0663948e-06 -2.2441149e-05 -3.3408403e-05 -3.4183264e-05 -2.4110079e-05 -5.9902668e-06 1.475215e-05 3.1650066e-05 3.9130449e-05 3.4451485e-05 1.8656254e-05 -3.695488e-06 -2.5779009e-05 -4.0501356e-05 -4.2915344e-05 -3.1769276e-05 -1.0073185e-05 1.5646219e-05 3.7401915e-05 4.8071146e-05 4.3869019e-05 2.5510788e-05 -1.6689301e-06 -2.9355288e-05 -4.8756599e-05 -5.3346157e-05 -4.1127205e-05 -1.5348196e-05 1.630187e-05 4.3958426e-05 5.865097e-05 5.5193901e-05 3.4034252e-05 1.2218952e-06 -3.3199787e-05 -5.8323145e-05 -6.5773726e-05 -5.2541494e-05 -2.2053719e-05 1.6570091e-05 5.1349401e-05 7.1018934e-05 6.8753958e-05 4.4465065e-05 5.1259995e-06 -3.7312508e-05 -6.9350004e-05 -8.0436468e-05 -6.6280365e-05 -3.0428171e-05 1.642108e-05 5.9694052e-05 8.5473061e-05 8.4877014e-05 5.7160854e-05 1.0222197e-05 -4.1633844e-05 -8.2045794e-05 -9.7692013e-05 -8.2671642e-05 -4.0709972e-05 1.5705824e-05 6.9051981e-05 0.00010222197 0.0001038909 7.2419643e-05 1.6748905e-05 -4.6223402e-05 -9.6559525e-05 -0.00011780858 -0.00010210276 -5.325675e-05 1.4275312e-05 7.9542398e-05 0.00012159348 0.00012618303 9.0628862e-05 2.4884939e-05 -5.1021576e-05 -0.00011312962 -0.0001411736 -0.00012499094 -6.8336725e-05 1.2069941e-05 9.1224909e-05 0.00014385581 0.00015220046 0.00011214614 3.4928322e-05 -5.5968761e-05 -0.00013193488 -0.00016817451 -0.00015178323 -8.6307526e-05 8.8512897e-06 0.00010424852 0.0001693666 0.00018236041 0.00013744831 4.7147274e-05 -6.1124563e-05 -0.00015324354 -0.00019925833 -0.00018295646 -0.00010761619 4.529953e-06 0.00011870265 0.00019848347 0.00021719933 0.00016701221 6.1810017e-05 -6.6399574e-05 -0.00017729402 -0.0002348721 -0.00021904707 -0.00013262033 -1.1324883e-06 0.0001347363 0.00023159385 0.00025725365 0.00020134449 7.930398e-05 -7.1763992e-05 -0.00020441413 -0.00027555227 -0.00026062131 -0.00016182661 -8.2850456e-06 0.00015243888 0.00026914477 0.00030308962 0.00024101138 9.9956989e-05 -7.724762e-05 -0.0002348423 -0.00032186508 -0.00030836463 -0.00019574165 -1.7166138e-05 0.00017204881 0.00031161308 0.00035539269 0.00028663874 0.00012415648 -8.2731247e-05 -0.00026902556 -0.00037443638 -0.00036293268 -0.00023490191 -2.8014183e-05 0.00019365549 0.00035953522 0.00041490793 0.00033897161 0.00015234947 -8.8244677e-05 -0.00030726194 -0.00043395162 -0.00042515993 -0.00027996302 -4.1097403e-05 0.00021749735 0.00041353703 0.0004824698 0.00039875507 0.00018504262 -9.3758106e-05 -0.00035008788 -0.00050121546 -0.00049591064 -0.00033161044 -5.6654215e-05 0.0002438128 0.00047427416 0.00055897236 0.00046688318 0.00022280216 -9.9182129e-05 -0.00039795041 
-0.00057715178 -0.0005762279 -0.00039067864 -7.5101852e-05 0.00027287006 0.00054258108 0.00064554811 0.00054439902 0.00026622415 -0.00010454655 -0.00045147538 -0.00066280365 -0.000667274 -0.0004580915 -9.6738338e-05 0.00030499697 0.00061935186 0.00074341893 0.00063249469 0.00031605363 -0.00010976195 -0.00051134825 -0.00075939298 -0.00077044964 -0.00053495169 -0.00012201071 0.00034052134 0.0007057488 0.00085410476 0.00073257089 0.00037321448 -0.00011485815 -0.00057846308 -0.00086843967 -0.00088745356 -0.00062254071 -0.0001514852 0.00037997961 0.00080308318 0.00097942352 0.00084635615 0.00043869019 -0.00011977553 -0.00065386295 -0.00099176168 -0.0010202527 -0.0007224977 -0.00018575788 0.00042393804 0.00091299415 0.0011216104 0.00097593665 0.00051382184 -0.0001244843 -0.00073879957 -0.001131624 -0.0011714697 -0.00083681941 -0.00022557378 0.00047317147 0.0010376275 0.0012834966 0.0011240244 0.00060024858 -0.00012898445 -0.00083500147 -0.0012909174 -0.0013442934 -0.00096800923 -0.0002720058 0.00052860379 0.0011796355 0.0014687181 0.0012940466 0.00070008636 -0.00013321638 -0.00094464421 -0.0014734566 -0.0015429854 -0.0011194348 -0.00032624602 0.00059151649 0.0013426244 0.0016820431 0.0014905035 0.00081604719 -0.00013720989 -0.0010705888 -0.0016842186 -0.001773119 -0.0012954772 -0.0003900528 0.00066363811 0.0015313625 0.0019299686 0.0017195642 0.00095194578 -0.00014090538 -0.0012168288 -0.0019300878 -0.0020424724 -0.0015022457 -0.00046575069 0.00074738264 0.0017524958 0.0022214651 0.0019897223 0.0011130273 -0.00014433265 -0.0013888776 -0.0022208095 -0.0023619235 -0.0017483532 -0.00055667758 0.00084617734 0.0020155013 0.0025694668 0.0023132563 0.001306802 -0.00014746189 -0.0015949011 -0.0025703907 -0.0027473867 -0.0020463765 -0.00066772103 0.000965029 0.0023345053 0.002993077 0.0027084649 0.0015445948 -0.00015026331 -0.0018470287 -0.003000319 -0.0032230616 -0.0024156272 -0.00080636144 0.001111716 0.0027312636 0.0035220981 0.0032038987 0.0018441677 -0.0001527369 -0.0021646321 -0.0035445094 -0.0038276911 -0.0028869808 -0.00098466873 0.0012989342 0.0032413304 0.0042053461 0.0038466752 0.0022348464 -0.00015491247 -0.0025801659 -0.0042604804 -0.0046271384 -0.0035134852 -0.0012235045 0.0015485883 0.0039269328 0.0051289201 0.0047204494 0.0027693212 -0.00015673041 -0.0031526983 -0.0052537024 -0.0057431757 -0.0043939352 -0.0015620291 0.0019024611 0.0049074888 0.006459713 0.0059890747 0.0035517216 -0.00015825033 -0.0040018559 -0.0067402124 -0.0074286759 -0.0057361722 -0.0020837784 0.0024507642 0.0064446032 0.008569032 0.0080227852 0.0048213005 -0.00015941262 -0.0054120123 -0.0092433095 -0.010307729 -0.0080637336 -0.0030037165 0.0034307539 0.0092432201 0.012482017 0.011871159 0.0072752833 -0.00016024709 -0.008263737 -0.014438123 -0.016450465 -0.013180375 -0.0050933063 0.0057327151 0.016076714 0.022447169 0.022137076 0.014167935 -0.00016075373 -0.017312884 -0.03219074 -0.039314687 -0.034229904 -0.014782161 0.018081814 0.060228378 0.1050289 0.14469498 0.17195278 0.18165725101581.99981.99983B-4.04791e-10 -1.39029e-10 6.728e-10 2.75797e-09 7.54651e-09 1.76668e-08 3.76936e-08 7.52313e-08 1.42425e-07 2.57999e-07 4.49904e-07 7.58655e-07 1.24139e-06 1.97658e-06 3.06939e-06 4.65728e-06 6.91569e-06 1.00631e-05 1.43649e-05 2.0135e-05 2.77346e-05 3.75661e-05 5.00617e-05 6.56652e-05 8.48049e-05 0.000107859 0.000135108 0.000166685 0.000202505 0.0002422 0.000285038 0.00032985 0.000374956 0.000418099 0.0004564 0.000486339 0.000503757 0.000503913 0.00048158 0.000431195 0.000347068 0.000223655 5.58818e-05 -0.000160476 
-0.000428365 -0.000749008 -0.00112143 -0.00154201 -0.00200406 -0.00249746 -0.00300841 -0.00351925 -0.00400852 -0.00445107 -0.00481847 -0.00507953 -0.00520107 -0.00514889 -0.00488884 -0.00438813 -0.00361664 -0.0025484 -0.00116293 0.00055335 0.00260595 0.00499129 0.00769592 0.0106961 0.0139576 0.0174363 0.0210783 0.0248217 0.0285976 0.0323318 0.0359474 0.0393664 0.0425127 0.0453141 0.0477047 0.0496275 0.0510356 0.0518947 0.05218341010939394B8.46923e-10 2.27422e-09 4.53895e-09 6.87379e-09 7.10904e-09 7.62784e-10 -1.98301e-08 -6.60994e-08 -1.53114e-07 -2.98249e-07 -5.18152e-07 -8.23636e-07 -1.21248e-06 -1.66055e-06 -2.11247e-06 -2.47353e-06 -2.60562e-06 -2.33013e-06 -1.44098e-06 2.69804e-07 2.97271e-06 6.74886e-06 1.15296e-05 1.70354e-05 2.27248e-05 2.7767e-05 3.10537e-05 3.1263e-05 2.69861e-05 1.69189e-05 1.11176e-07 -2.37456e-05 -5.40267e-05 -8.89227e-05 -0.000125282 -0.000158604 -0.000183236 -0.000192798 -0.000180864 -0.00014185 -7.2067e-05 2.91938e-05 0.000158665 0.000308386 0.000465422 0.000612198 0.000727547 0.000788512 0.000772858 0.000662123 0.000444926 0.000120161 -0.000300369 -0.000790361 -0.00130887 -0.00180202 -0.00220662 -0.00245579 -0.00248619 -0.00224631 -0.00170503 -0.000859463 0.000259011 0.00158157 0.0030025 0.00438414 0.00556632 0.00638 0.0066644 0.00628604 0.00515814 0.0032582 0.000641682 -0.00255018 -0.00609006 -0.0096727 -0.0129317 -0.0154642 -0.0168626 -0.016749 -0.0148125 -0.0108426 -0.00475778 0.00337489 0.0133318 0.0247374 0.0370816 0.0497507 0.062067 0.0733368 0.0829015 0.0901866 0.0947464 0.0962985105565657B3.62415e-09 1.47022e-08 2.92465e-08 1.67582e-08 -9.45969e-08 -4.00529e-07 -9.37365e-07 -1.53044e-06 -1.64599e-06 -4.0787e-07 3.03009e-06 8.68599e-06 1.4766e-05 1.71367e-05 1.01281e-05 -1.07286e-05 -4.43965e-05 -8.0298e-05 -9.76176e-05 -7.09487e-05 1.69209e-05 0.000158508 0.000309494 0.00039128 0.000315589 2.91906e-05 -0.000436272 -0.000935417 -0.00122839 -0.00106147 -0.000293981 0.000975648 0.00235246 0.00321559 0.00293343 0.00117105 -0.00182955 -0.00514565 -0.00736121 -0.00705636 -0.00346765 0.00293511 0.0102486 0.0155183 0.0157062 0.00898787 -0.00408117 -0.0200454 -0.033035 -0.0362846 -0.0243088 0.00495879 0.0488453 0.100015 0.148075 0.182311 0.194711102111111112B-2.4877e-10 4.73744e-09 1.24032e-08 2.18423e-09 -2.9735e-08 -2.7741e-08 4.8235e-08 9.04852e-08 -4.3772e-08 -2.02925e-07 -2.93251e-08 3.57677e-07 2.38076e-07 -5.05054e-07 -6.56661e-07 5.29514e-07 1.33303e-06 -2.35391e-07 -2.23456e-06 -6.43189e-07 3.17885e-06 2.39189e-06 -3.76643e-06 -5.21313e-06 3.34388e-06 9.06984e-06 -1.0342e-06 -1.34948e-05 -4.1282e-06 1.74081e-05 1.29325e-05 -1.90112e-05 -2.55982e-05 1.58385e-05 4.12985e-05 -5.05136e-06 -5.76943e-05 -1.59734e-05 7.06232e-05 4.87621e-05 -7.41183e-05 -9.2656e-05 6.09281e-05 0.000143704 -2.36571e-05 -0.000193818 -4.34607e-05 0.000230529 0.0001423 -0.000237697 -0.00026842 0.00019742 0.000408986 -9.32264e-05 -0.000541667 -8.56506e-05 0.000635146 0.000339424 -0.000651794 -0.000653162 0.000552734 0.000993533 -0.000305133 -0.00130802 -0.000108987 0.00152764 0.000683646 -0.00157381 -0.00138119 0.00136932 0.00212784 -0.000852739 -0.00281433 -5.43598e-06 0.00330293 0.00118724 -0.00344149 -0.00261502 0.00308386 0.00414554 -0.00211493 -0.00557267 0.000477543 0.00664017 0.00180254 -0.00706464 -0.00459738 0.00656694 0.0076669 -0.00490895 -0.0106603 0.00193106 0.0131289 0.00241487 -0.0145476 -0.00804248 0.0143373 0.0147221 -0.0118736 -0.0220889 0.00645106 0.0296694 0.00287172 -0.0369222 -0.0177763 0.0432915 0.0424836 -0.0482648 -0.0928087 0.0514286 0.313777 
0.4474861993-01-01T00:00:00.0000ZGEOFON Program, GFZ Potsdam, Germanyfalsetrue{"type":"DOI","value":"10.14470/TR560404"}FDSNXML:Identifier/02008-09-20T00:00:00.0000ZSondre Stromfjord, Greenland66.9961-50.62076330Greenlandfalsetrue2016-01-20T13:00:00.0000Z66.996101-50.6207623302016-01-20T13:00:00.0000Zxxxx0yyyy020100-9040651200000.05M/Sfalse{"description":"Streckeisen STS-1VBB w/E300","resourceId":"Sensor/20190326190423.588647.428"}FDSNXML:Sensor{"resourceId":"Datalogger/20190326190423.588618.427"}FDSNXML:DataLogger2014-02-16T00:00:00.0000Z66.996101-50.6207623302014-02-16T00:00:00.0000Zxxxx0yyyy040100-9024477900000.02M/Sfalse{"id":1,"value":"Channel is down."}FDSNXML:Comment/12006-10-25T16:06:00.0000Z{"id":2,"value":"Channel is down."}FDSNXML:Comment/22007-04-19T18:50:00.0000Z{"description":"Streckeisen STS-2.5","resourceId":"Sensor/20190326190423.597322.612"}FDSNXML:Sensor{"resourceId":"Datalogger/20190326190423.597294.611"}FDSNXML:DataLogger2018-10-01T00:00:00.0000ZGEOFON/GGSA Station Accra, Ghana5.641485-0.20711775Ghanafalsetrue2018-10-01T00:00:00.0000Z5.64147-0.20716752018-10-01T00:00:00.0000Z0028200000932020110-904756000001M/Sfalse{"type":"Nanometrics Trillium 240","manufacturer":"Nanometrics","model":"Trillium 240","serialNumber":"000932","resourceId":"NRL/Nanometrics/Trillium240_g2.244.1189"}FDSNXML:Sensor{"type":"Nanometrics Centaur","manufacturer":"Nanometrics","model":"Centaur","serialNumber":"002820","resourceId":"NRL/Nanometrics/CENTAUR.1.20.OFF"}FDSNXML:DataLogger2018-10-01T00:00:00.0000Z0028202000932220119004756000001M/Sfalse{"type":"Nanometrics Trillium 240","manufacturer":"Nanometrics","model":"Trillium 240","serialNumber":"000932","resourceId":"NRL/Nanometrics/Trillium240_g2.244.1189"}FDSNXML:Sensor{"type":"Nanometrics Centaur","manufacturer":"Nanometrics","model":"Centaur","serialNumber":"002820","resourceId":"NRL/Nanometrics/CENTAUR.1.20.OFF"}FDSNXML:DataLogger2018-10-01T00:00:00.0000Z0028201yyyy12011004756000001M/Sfalse{"type":"Nanometrics Trillium 240","manufacturer":"Nanometrics","model":"Trillium 240","resourceId":"NRL/Nanometrics/Trillium240_g2.244.1189"}FDSNXML:Sensor{"type":"Nanometrics Centaur","manufacturer":"Nanometrics","model":"Centaur","serialNumber":"002820","resourceId":"NRL/Nanometrics/CENTAUR.1.20.OFF"}FDSNXML:DataLogger2009-04-10T00:00:00.0000ZLimon Verde, Chile-22.6127-68.91112960Chilefalsetrue2012-12-12T16:30:00.0000Z-22.6127-68.91110229302012-12-12T16:30:00.0000Zxxxx0yyyy0201300-9043436200000.05M/Sfalse{"id":1,"value":"Channel has long period noise."}FDSNXML:Comment/12002-04-18T21:44:00.0000Z{"id":2,"value":"Invalid instrument response."}FDSNXML:Comment/22009-02-05T22:24:00.0000Z{"description":"Streckeisen STS-1VBB w/E300","resourceId":"Sensor/20190326190421.432177.703"}FDSNXML:Sensor{"resourceId":"Datalogger/20190326190421.432149.702"}FDSNXML:DataLogger2012-09-24T12:00:00.0000Z-22.6127-68.91110229302012-09-24T12:00:00.0000Zxxxx0yyyy0401300-9025065100000.02M/Sfalse{"id":1,"value":"Seismometer mass is against stops."}FDSNXML:Comment/12001-06-26T15:11:00.0000Z{"id":2,"value":"Seismometer mass is against stops."}FDSNXML:Comment/22002-04-21T19:06:00.0000Z{"id":3,"value":"Channel is down."}FDSNXML:Comment/32003-07-14T10:41:00.0000Z{"description":"Streckeisen STS-2.5","resourceId":"Sensor/20190326190421.441749.907"}FDSNXML:Sensor{"resourceId":"Datalogger/20190326190421.441721.906"}FDSNXML:DataLogger2008-05-02T00:00:00.0000ZNOA/GEOFON Station Apeiranthos,Naxos, 
Greece37.0727425.52301608Greecefalsetrue2008-05-02T00:00:00.0000Z37.0727425.523016082008-05-02T00:00:00.0000Zxxxx1yyyy12010005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2008-05-02T00:00:00.0000Zxxxx2yyyy220109005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2008-05-02T00:00:00.0000Zxxxx0yyyy020100-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2014-01-23T00:00:00.0000ZGEOFON/MedNet/KOERI Station Arapgir, Turkey39.0928938.335571537Turkeyfalsetrue2014-01-23T00:00:00.0000Z39.0928938.3355715372014-01-23T00:00:00.0000Zxxxx120023120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"20023","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2014-01-23T00:00:00.0000Zxxxx2200232201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"20023","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2014-01-23T00:00:00.0000Zxxxx020023020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","serialNumber":"20023","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2018-01-12T00:00:00.0000ZStation GEOFON, JSO/DESERVE Station Balqa/Jordan32.087335.64375JSO/DESERVE Station Balqa/Jordanfalsetrue2018-01-12T00:00:00.0000Z32.087335.643752018-01-12T00:00:00.0000Zxxxx1yyyy120130025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2018-01-12T00:00:00.0000Zxxxx2yyyy2201390025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2018-01-12T00:00:00.0000Zxxxx0yyyy020130-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger1999-11-11T00:00:00.0000ZINETER/GEOFON Station INETER, GEOFON Station Boaco/Nicaragua12.4493-85.6659381GEOFON Station Boaco/Nicaraguafalsetrue2012-05-23T00:00:00.0000Z12.4493-85.66593812012-05-23T00:00:00.0000Zxxxx1yyyy12015005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2012-05-23T00:00:00.0000Zxxxx2yyyy220159005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2012-05-23T00:00:00.0000Zxxxx0yyyy020150-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger1998-12-09T00:00:00.0000ZGEOFON/GII Station Mathiatis, Cyprus34.961133.331396Cyprusfalsetrue2008-08-28T00:00:00.0000Z34.961133.3313962008-08-28T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-08-28T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-08-28T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-05-23T00:00:00.0000ZGEOFON Station Eilat, Israel29.669934.9512210Israelfalsetrue2008-05-23T00:00:00.0000Z29.669934.95122102008-05-23T00:00:00.0000Zxxxx1yyyy12012000025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2008-05-23T00:00:00.0000Zxxxx2yyyy220120090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2008-05-23T00:00:00.0000Zxxxx0yyyy02012000-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2019-03-27T00:00:00.0000ZGEOFON Station Falkenstein, Germany51.681911.2655335Germanyfalsetrue2022-12-07T12:00:00.0000Z51.681911.26553352022-12-07T12:00:00.0000Zxxxx0yyyy020100-908381889121M/Sfalse{"description":"GFZ:GE1993:Trillium-360/GSNv2/g=1998","resourceId":"Sensor/20221222151509.221177.537"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2022-12-07T12:00:00.0000Zxxxx1yyyy12010008381889121M/Sfalse{"description":"GFZ:GE1993:Trillium-360/GSNv2/g=1998","resourceId":"Sensor/20221222151509.221177.537"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2022-12-07T12:00:00.0000Zxxxx2yyyy220109008381889121M/Sfalse{"description":"GFZ:GE1993:Trillium-360/GSNv2/g=1998","resourceId":"Sensor/20221222151509.221177.537"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-11-19T00:00:00.0000ZTemp GEOFON Station Temp GEOFON Station Flechtingen, Germany52.330611.2372100Germanyfalsetrue2008-11-19T00:00:00.0000Z52.330611.23721002008-11-19T00:00:00.0000Zxxxx1yyyy120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2008-11-19T00:00:00.0000Zxxxx2yyyy2201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2008-11-19T00:00:00.0000Zxxxx0yyyy020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2011-06-01T00:00:00.0000ZDESERVE Station GEOFON, JSO/DESERVE Station Ghor Haditha/Jordan31.3034135.56648-58JSO/DESERVE Station Ghor Haditha/Jordanfalsetrue2016-01-11T00:00:00.0000Z31.3034135.56648-582016-01-11T00:00:00.0000Zxxxx1639140130020132640001M/Sfalse{"type":"VBB","description":"GFZ:GE1993:Trillium-240/g=1200","model":"Trillium-240","serialNumber":"639","resourceId":"Sensor/20190409123644.756565.551"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2016-01-11T00:00:00.0000Zxxxx26392401390020132640001M/Sfalse{"type":"VBB","description":"GFZ:GE1993:Trillium-240/g=1200","model":"Trillium-240","serialNumber":"639","resourceId":"Sensor/20190409123644.756565.551"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2016-01-11T00:00:00.0000Zxxxx0639040130-9020132640001M/Sfalse{"type":"VBB","description":"GFZ:GE1993:Trillium-240/g=1200","model":"Trillium-240","serialNumber":"639","resourceId":"Sensor/20190409123644.756565.551"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2003-09-12T00:00:00.0000ZNOA/GEOFON Station Gavdos, Greece34.8391424.08738170Greecefalsetrue2003-09-12T00:00:00.0000Z34.8391424.087381702003-09-12T00:00:00.0000Zxxxx1yyyy12010005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2003-09-12T00:00:00.0000Zxxxx2yyyy220109005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData 
PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2003-09-12T00:00:00.0000Zxxxx0yyyy020100-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2001-12-14T00:00:00.0000ZUKiel/GEOFON Station Helgoland, Germany54.1830927.88301437Germanyfalsetrue2012-02-02T00:00:00.0000Z54.1830927.883014372012-02-02T00:00:00.0000Zxxxx1yyyy12010005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2012-02-02T00:00:00.0000Zxxxx2yyyy220109005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2012-02-02T00:00:00.0000Zxxxx0yyyy020100-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger1999-07-12T00:00:00.0000ZRUB/GEOFON Station Ibbenbueren, Germany52.30637.7592140Germanyfalsetrue2005-09-23T00:00:00.0000Z52.30637.75921402005-09-23T00:00:00.0000Zxxxx1yyyy12011005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2005-09-23T00:00:00.0000Zxxxx2yyyy220119005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2005-09-23T00:00:00.0000Zxxxx0yyyy020110-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2010-07-14T00:00:00.0000ZNOA/GEOFON Station Iera Moni Varipetrou, 
Greece35.460623.9811230Greecefalsetrue2010-07-14T00:00:00.0000Z35.460623.98112302010-07-14T00:00:00.0000Zxxxx1yyyy12010005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2010-07-14T00:00:00.0000Zxxxx2yyyy220109005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2010-07-14T00:00:00.0000Zxxxx0yyyy020100-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger1996-10-24T00:00:00.0000ZGEOFON/MedNet/KOERI Station Isparta, Turkey37.843330.50931100Turkeyfalsetrue2014-01-16T00:00:00.0000Z37.843330.509311002014-01-16T00:00:00.0000Zxxxx1S1isp1120150038990212800.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-1/VBB/g=2400","manufacturer":"Streckeisen","model":"STS-1/VBB","serialNumber":"S1isp1","resourceId":"Sensor/20190409123644.789465.727"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2014-01-16T00:00:00.0000Zxxxx2S1isp12201590038453342400.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-1/VBB/g=2400","manufacturer":"Streckeisen","model":"STS-1/VBB","serialNumber":"S1isp1","resourceId":"Sensor/20190409123644.789465.727"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2014-01-16T00:00:00.0000Zxxxx0S1isp1020150-9042781860000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-1/VBB/g=2400","manufacturer":"Streckeisen","model":"STS-1/VBB","serialNumber":"S1isp1","resourceId":"Sensor/20190409123644.789465.727"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2014-01-16T00:00:00.0000Z37.843330.509311002014-01-16T00:00:00.0000Zxxxx1yyyy12015005880000000.04M/Sfalse{"type":"VBB","description":"GFZ:GE1993:CMG-3T/100/g=1500","manufacturer":"Guralp","model":"CMG-3T/100","resourceId":"Sensor/20190409123644.799169.758"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2014-01-16T00:00:00.0000Zxxxx2yyyy220159005880000000.04M/Sfalse{"type":"VBB","description":"GFZ:GE1993:CMG-3T/100/g=1500","manufacturer":"Guralp","model":"CMG-3T/100","resourceId":"Sensor/20190409123644.799169.758"}FDSNXML:Sensor{"type":"EarthData 
PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2014-01-16T00:00:00.0000Zxxxx0yyyy020150-905880000000.04M/Sfalse{"type":"VBB","description":"GFZ:GE1993:CMG-3T/100/g=1500","manufacturer":"Guralp","model":"CMG-3T/100","resourceId":"Sensor/20190409123644.799169.758"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2009-11-25T00:00:00.0000ZNOA/RUB/GEOFON Station Karpathos,Greece35.547127.16106524falsetrue2009-11-25T00:00:00.0000Z35.547127.161065242009-11-25T00:00:00.0000Zxxxx1yyyy12010005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2009-11-25T00:00:00.0000Zxxxx2yyyy220109005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2009-11-25T00:00:00.0000Zxxxx0yyyy020100-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2013-06-19T00:00:00.0000ZNOA/GEOFON Station Kythira, Greece36.256623.0621360Greecefalsetrue2013-06-19T00:00:00.0000Z36.256623.06213602013-06-19T00:00:00.0000Zxxxx1yyyy12010005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2013-06-19T00:00:00.0000Zxxxx2yyyy220109005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2013-06-19T00:00:00.0000Zxxxx0yyyy020100-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2005-11-24T00:00:00.0000ZGEOFON Station GEOFON Station Kabul, 
Afghanistan34.515169.13471820Afghanistanfalsetrue2005-11-24T00:00:00.0000Z34.515169.134718202005-11-24T00:00:00.0000Zxxxx1yyyy12013005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2005-11-24T00:00:00.0000Zxxxx2yyyy220139005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2005-11-24T00:00:00.0000Zxxxx0yyyy020130-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2003-06-24T00:00:00.0000ZRUB Station Keramoti, Crete35.369223.5577240Cretefalsetrue2010-05-15T00:00:00.0000Z35.369223.55772402010-05-15T00:00:00.0000Zxxxx1yyyy12010005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2010-05-15T00:00:00.0000Zxxxx2yyyy220109005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2010-05-15T00:00:00.0000Zxxxx0yyyy020100-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2007-04-24T00:00:00.0000ZU Basilicata/GEOFON Station Matera, Italy40.6490716.70442494Italyfalsetrue2007-04-24T00:00:00.0000Z40.6490716.704424942007-04-24T00:00:00.0000Zxxx1yyyy12015005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","serialNumber":"xxx","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2007-04-24T00:00:00.0000Zxxx2yyyy220159005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData 
PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","serialNumber":"xxx","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2007-04-24T00:00:00.0000Zxxx0yyyy020150-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","serialNumber":"xxx","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger1993-11-18T00:00:00.0000ZGEOFON Station Moravsky Beroun, Czech Republic49.776617.5428740Czech Republicfalsetrue2010-06-14T00:00:00.0000Z49.776617.54287402010-06-14T00:00:00.0000Zxxxx1yyyy120150025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2010-06-14T00:00:00.0000Zxxxx2yyyy2201590025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2010-06-14T00:00:00.0000Zxxxx0yyyy020150-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2013-06-16T00:00:00.0000ZDESERVE Station Mazada, Israel31.312635.358-220Israelfalsetrue2013-06-16T00:00:00.0000Z31.312635.358-2202013-06-16T00:00:00.0000Zxxxx1yyyy12013005033160001M/Sfalse{"type":"VBB","description":"GFZ:GE1993:Trillium-240/g=1200","model":"Trillium-240","resourceId":"Sensor/20190409123644.756565.551"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2013-06-16T00:00:00.0000Zxxxx2yyyy220139005033160001M/Sfalse{"type":"VBB","description":"GFZ:GE1993:Trillium-240/g=1200","model":"Trillium-240","resourceId":"Sensor/20190409123644.756565.551"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2013-06-16T00:00:00.0000Zxxxx0yyyy020130-905033160001M/Sfalse{"type":"VBB","description":"GFZ:GE1993:Trillium-240/g=1200","model":"Trillium-240","resourceId":"Sensor/20190409123644.756565.551"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger1997-10-07T00:00:00.0000ZGEOFON Station Manteigas, 
Portugal40.3997-7.5442815Portugalfalsetrue2014-10-09T00:00:00.0000Z40.3997-7.54428152014-10-09T00:00:00.0000Zxxxx1yyyy12013005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2014-10-09T00:00:00.0000Zxxxx2yyyy220139005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2014-10-09T00:00:00.0000Zxxxx0yyyy020130-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2016-10-18T00:00:00.0000ZDMH/GEOFON Station Naypyitaw, Myanmar19.778596.1376158Myanmarfalsetrue2016-10-18T00:00:00.0000Z19.778596.13761582016-10-18T00:00:00.0000Zxxxx1yyyy12012007843140001M/Sfalse{"description":"GFZ:GE1993:CMG-3ESP/30/NPW/g=2000","resourceId":"Sensor/20190409123644.947111.1413"}FDSNXML:Sensor{"type":"Guralp DM24","description":"GFZ:GE1993:DM24/g=392157","manufacturer":"Guralp","model":"DM24","resourceId":"Datalogger/20190409123644.947517.1415"}FDSNXML:DataLogger2016-10-18T00:00:00.0000Zxxxx2yyyy220129007843140001M/Sfalse{"description":"GFZ:GE1993:CMG-3ESP/30/NPW/g=2000","resourceId":"Sensor/20190409123644.947111.1413"}FDSNXML:Sensor{"type":"Guralp DM24","description":"GFZ:GE1993:DM24/g=392157","manufacturer":"Guralp","model":"DM24","resourceId":"Datalogger/20190409123644.947517.1415"}FDSNXML:DataLogger2016-10-18T00:00:00.0000Zxxxx0yyyy020120-907843140001M/Sfalse{"description":"GFZ:GE1993:CMG-3ESP/30/NPW/g=2000","resourceId":"Sensor/20190409123644.947111.1413"}FDSNXML:Sensor{"type":"Guralp DM24","description":"GFZ:GE1993:DM24/g=392157","manufacturer":"Guralp","model":"DM24","resourceId":"Datalogger/20190409123644.947517.1415"}FDSNXML:DataLogger2012-04-23T00:00:00.0000ZLGT/GEOFON Station Paberze, Lithuania55.5052323.9679658Lithuaniafalsetrue2012-04-23T00:00:00.0000Z55.5052323.96796582012-04-23T00:00:00.0000Zxxxx1yyyy12015005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2012-04-23T00:00:00.0000Zxxxx2yyyy220159005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData 
PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2012-04-23T00:00:00.0000Zxxxx0yyyy020150-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2011-11-03T00:00:00.0000ZLGT/GEOFON Station Paburge, Lithuania56.0236121.92945126Lithuaniafalsetrue2011-11-03T00:00:00.0000Z56.0236121.929451262011-11-03T00:00:00.0000Zxxxx1yyyy12013005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2011-11-03T00:00:00.0000Zxxxx2yyyy220139005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2011-11-03T00:00:00.0000Zxxxx0yyyy020130-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger1998-05-05T00:00:00.0000ZGEOFON Station Pulkovo, Russia59.76730.31765Russiafalsetrue2011-01-01T00:00:00.0000Z59.76730.317652011-01-01T00:00:00.0000Zxxxx1yyyy120110005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2011-01-01T00:00:00.0000Zxxxx2yyyy2201109005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2011-01-01T00:00:00.0000Zxxxx0yyyy0201100-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger1995-12-01T00:00:00.0000ZGRSN/GEOFON Station Ruegen, 
Germany54.547713.321415Germanyfalsetrue2014-05-02T12:48:00.0000Z54.547713.3214152014-05-02T12:48:00.0000Zxxxx1yyyy12012005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2014-05-02T12:48:00.0000Zxxxx2yyyy220129005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2014-05-02T12:48:00.0000Zxxxx0yyyy020120-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2000-01-19T00:00:00.0000ZGRSN/GEOFON Station Ruedersdorf, Germany52.475913.7840Germanyfalsetrue2012-03-21T10:00:00.0000Z52.475913.78402012-03-21T10:00:00.0000Zxxxx1yyyy12013005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2012-03-21T10:00:00.0000Zxxxx2yyyy220139005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2012-03-21T10:00:00.0000Zxxxx0yyyy020130-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2015-10-11T00:00:00.0000ZDESERVE Station Salfit, Palestine Territories32.068635.1914475Palestine Territoriesfalsetrue2015-10-11T00:00:00.0000Z32.068635.19144752015-10-11T00:00:00.0000Zxxxx1yyyy140130020132640001M/Sfalse{"type":"VBB","description":"GFZ:GE1993:Trillium-240/g=1200","model":"Trillium-240","resourceId":"Sensor/20190409123644.756565.551"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2015-10-11T00:00:00.0000Zxxxx2yyyy2401390020132640001M/Sfalse{"type":"VBB","description":"GFZ:GE1993:Trillium-240/g=1200","model":"Trillium-240","resourceId":"Sensor/20190409123644.756565.551"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2015-10-11T00:00:00.0000Zxxxx0yyyy040130-9020132640001M/Sfalse{"type":"VBB","description":"GFZ:GE1993:Trillium-240/g=1200","model":"Trillium-240","resourceId":"Sensor/20190409123644.756565.551"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2018-11-09T00:00:00.0000ZGEOFON/INPRES Station Salta, Argentina-24.2241-66.344000Argentinafalsetrue2019-04-03T00:00:00.0000Z-24.2241-66.3440002019-04-03T00:00:00.0000Zxxxx1yyyy120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2019-04-03T00:00:00.0000Zxxxx2yyyy2201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2019-04-03T00:00:00.0000Zxxxx0yyyy020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2006-10-25T00:00:00.0000ZLEGMA/GEOFON Station LEGMA, GEOFON Station Slitere/Latvia57.628722.290515GEOFON Station Slitere/Latviafalsetrue2006-10-25T00:00:00.0000Z57.628722.2905152006-10-25T00:00:00.0000Zxxxx1yyyy12015005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2006-10-25T00:00:00.0000Zxxxx2yyyy220159005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2006-10-25T00:00:00.0000Zxxxx0yyyy020150-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2003-01-10T00:00:00.0000ZGEOFON/AWI Station Sanae, 
Antarctica-71.6707-2.8379846Antarcticafalsetrue2011-01-15T09:56:00.0000Z-71.6707-2.83798462011-01-15T09:56:00.0000Zxxxx1yyyy12011003.35544e+100.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=20000","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123645.111252.2011"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2011-01-15T09:56:00.0000Zxxxx2yyyy220119003.35544e+100.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=20000","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123645.111252.2011"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2011-01-15T09:56:00.0000Zxxxx0yyyy020110-903.35544e+100.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=20000","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123645.111252.2011"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger1994-04-29T00:00:00.0000ZVBB Station Stuttgart, Germany48.77199.195360Germanyfalsetrue2015-09-03T14:55:00.0000Z48.77199.1953602015-09-03T14:55:00.0000Zxxxx1yyyy1201100015000000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-HG/g=1000000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.683708.175"}FDSNXML:DataLogger2015-09-03T14:55:00.0000Zxxxx2yyyy22011090015000000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-HG/g=1000000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.683708.175"}FDSNXML:DataLogger2015-09-03T14:55:00.0000Zxxxx0yyyy0201100-9015000000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-HG/g=1000000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.683708.175"}FDSNXML:DataLogger2003-10-13T00:00:00.0000ZGEOFON Station Tirgusor, Romania44.458128.412877Romaniafalsetrue2003-10-13T00:00:00.0000Z44.458128.4128772003-10-13T00:00:00.0000Zxxxx1yyyy12013005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2003-10-13T00:00:00.0000Zxxxx2yyyy220139005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData 
PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2003-10-13T00:00:00.0000Zxxxx0yyyy020130-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2013-04-22T00:00:00.0000ZDESERVE Station Al Uja, Palestine Territories31.9520135.46435-200Palestine Territoriesfalsetrue2013-04-22T00:00:00.0000Z31.9520135.46435-2002013-04-22T00:00:00.0000Z39521640120130020132640001M/Sfalse{"type":"VBB","description":"GFZ:GE1993:Trillium-240/g=1200","model":"Trillium-240","serialNumber":"640","resourceId":"Sensor/20190409123644.756565.551"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"3952","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2013-04-22T00:00:00.0000Z395226402201390020132640001M/Sfalse{"type":"VBB","description":"GFZ:GE1993:Trillium-240/g=1200","model":"Trillium-240","serialNumber":"640","resourceId":"Sensor/20190409123644.756565.551"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"3952","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2013-04-22T00:00:00.0000Z39520640020130-9020132640001M/Sfalse{"type":"VBB","description":"GFZ:GE1993:Trillium-240/g=1200","model":"Trillium-240","serialNumber":"640","resourceId":"Sensor/20190409123644.756565.551"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","serialNumber":"3952","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2003-04-26T00:00:00.0000ZGEOFON Station Vasula, Estonia58.46226.734763Estoniafalsetrue2003-04-26T00:00:00.0000Z58.46226.7347632003-04-26T00:00:00.0000Zxxxx1yyyy12013005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2003-04-26T00:00:00.0000Zxxxx2yyyy220139005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2003-04-26T00:00:00.0000Zxxxx0yyyy020130-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2008-01-01T00:00:00.0000ZGSN/GEOFON Station GSN, GEOFON Station Windhoek/Namibia-22.569117.09151720GEOFON Station 
Windhoek/Namibiafalsetrue2008-01-01T00:00:00.0000Z-22.569117.091517202008-01-01T00:00:00.0000Zxxxx1yyyy12010005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2008-01-01T00:00:00.0000Zxxxx2yyyy220109005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2008-01-01T00:00:00.0000Zxxxx0yyyy020100-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger1994-03-15T00:00:00.0000ZGEOFON Station Walferdange, Luxembourg49.66466.1526295Luxembourgfalsetrue2012-12-07T18:00:00.0000Z49.66466.15262952012-12-07T18:00:00.0000Zxxxx1yyyy1201800025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2012-12-07T18:00:00.0000Zxxxx2yyyy22018090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2012-12-07T18:00:00.0000Zxxxx0yyyy0201800-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2011-07-19T11:49:00.0000ZNOA/GEOFON Station Zakros, Crete35.114726.21691254Cretefalsetrue2011-07-19T11:49:00.0000Z35.114726.216912542011-07-19T11:49:00.0000Zxxxx1yyyy12012005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2011-07-19T11:49:00.0000Zxxxx2yyyy220129005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData 
PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2011-07-19T11:49:00.0000Zxxxx0yyyy020120-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2008-09-17T00:00:00.0000ZGEOFON Station GEOFON Station Dhamar, Yemen14.572544.39172488Yemenfalsetrue2008-09-17T00:00:00.0000Z14.572544.391724882008-09-17T00:00:00.0000Zxxxx1yyyy12012006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-09-17T00:00:00.0000Zxxxx2yyyy220129006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-09-17T00:00:00.0000Zxxxx0yyyy020120-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-01-22T00:00:00.0000ZGEOFON Station GEOFON Station Socotra, Yemen12.5195253.98742804Yemenfalsetrue2009-01-22T00:00:00.0000Z12.5195253.987428042009-01-22T00:00:00.0000Zxxxx1yyyy120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2009-01-22T00:00:00.0000Zxxxx2yyyy2201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2009-01-22T00:00:00.0000Zxxxx0yyyy020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2011-09-13T00:00:00.0000ZGEOFON Station Kibwezi, 
Kenya-2.3590738.0433790Kenyafalsetrue2011-09-13T00:00:00.0000Z-2.3590738.04337902011-09-13T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2011-09-13T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2011-09-13T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2011-09-26T00:00:00.0000ZGEOFON Station Lodwar, Kenya3.4219535.36165665Kenyafalsetrue2011-09-26T00:00:00.0000Z3.4219535.361656652011-09-26T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2011-09-26T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2011-09-26T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-11-19T00:00:00.0000ZGEOFON Station Sambava, Madagascar-13.458449.921265Madagascarfalsetrue2009-11-19T00:00:00.0000Z-13.458449.9212652009-11-19T00:00:00.0000Zxxxx1yyyy120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2009-11-19T00:00:00.0000Zxxxx2yyyy2201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2009-11-19T00:00:00.0000Zxxxx0yyyy020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2009-11-26T00:00:00.0000ZGEOFON Station Vohitsoka, Madagascar-21.9064846.793271158Madagascarfalsetrue2009-11-26T00:00:00.0000Z-21.9064846.7932711582009-11-26T00:00:00.0000Zxxxx1yyyy120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2009-11-26T00:00:00.0000Zxxxx2yyyy2201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2009-11-26T00:00:00.0000Zxxxx0yyyy020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2008-01-22T00:00:00.0000ZGEOFON Station GEOFON Station Hanimaadhoo, Maldives6.7731473.182161Maldivesfalsetrue2008-01-22T00:00:00.0000Z6.7731473.1821612008-01-22T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-01-22T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-01-22T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-01-29T00:00:00.0000ZGEOFON Station GEOFON Station Kaadhehdhoo, 
Maldives0.4926472.994861Maldivesfalsetrue2008-01-29T00:00:00.0000Z0.4926472.9948612008-01-29T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-01-29T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-01-29T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2010-04-10T00:00:00.0000ZGEOFON Station Mahakanadarawa, Sri Lanka8.3968280.54254113Sri Lankafalsetrue2010-04-10T00:00:00.0000Z8.3968280.542541132010-04-10T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2010-04-10T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2010-04-10T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2011-05-12T00:00:00.0000ZGEOFON Station Hakmana, Sri Lanka6.087780.6806120Sri Lankafalsetrue2011-05-12T00:00:00.0000Z6.087780.68061202011-05-12T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2011-05-12T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2011-05-12T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2016-09-05T00:00:00.0000ZGEOFON Station GEOFON Station Bungbulang, Garut, Java-7.462603107.65615Javafalsetrue2016-09-05T00:00:00.0000Z-7.462603107.656152016-09-05T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2016-09-05T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2016-09-05T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-06-09T00:00:00.0000ZGEOFON Station GEOFON Station Balikpapan, Kalimantan-1.1073116.9048110Kalimantanfalsetrue2009-06-09T00:00:00.0000Z-1.1073116.90481102009-06-09T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-06-09T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-06-09T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-04-29T00:00:00.0000ZGEOFON Station GEOFON Station Bangkinang, 
Sumatra0.3262101.039665Sumatrafalsetrue2008-04-29T00:00:00.0000Z0.3262101.0396652008-04-29T00:00:00.0000Zxxxx1yyyy12013006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-04-29T00:00:00.0000Zxxxx2yyyy220139006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-04-29T00:00:00.0000Zxxxx0yyyy020130-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-07-04T00:00:00.0000ZGEOFON Station Bandaneira, Indonesia-4.5224129.904516Indonesiafalsetrue2008-07-04T00:00:00.0000Z-4.5224129.9045162008-07-04T00:00:00.0000Zxxxx1yyyy12012006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-07-04T00:00:00.0000Zxxxx2yyyy220129006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-07-04T00:00:00.0000Zxxxx0yyyy020120-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-12-01T00:00:00.0000ZGEOFON Station GEOFON Station Fak Fak, Irian Jaya-2.91925132.24889136Irian Jayafalsetrue2008-12-01T00:00:00.0000Z-2.91925132.248891362008-12-01T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-12-01T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-12-01T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-11-26T00:00:00.0000ZGEOFON Station Genyem, Irian Jaya Indonesia-2.5927140.167847Irian Jaya Indonesiafalsetrue2009-11-26T00:00:00.0000Z-2.5927140.1678472009-11-26T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-11-26T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-11-26T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2005-10-02T00:00:00.0000ZGEOFON Station Gunungsitoli, Nias1.303997.5755107Niasfalsetrue2014-03-20T00:00:00.0000Z1.303997.57551072014-03-20T00:00:00.0000Zxxxx1yyyy120100025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2014-03-20T00:00:00.0000Zxxxx2yyyy2201090025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2014-03-20T00:00:00.0000Zxxxx0yyyy020100-9025165800000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/HR/g=1677720","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.666722.77"}FDSNXML:DataLogger2008-07-21T00:00:00.0000ZGEOFON Station GEOFON Station Jajag, 
Java-8.4702114.1521171Javafalsetrue2008-07-21T00:00:00.0000Z-8.4702114.15211712008-07-21T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-07-21T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-07-21T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-02-29T00:00:00.0000ZGEOFON Station GEOFON Station Lhokseumave, Sumatra5.228896.94723Sumatrafalsetrue2008-02-29T00:00:00.0000Z5.228896.947232008-02-29T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-02-29T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-02-29T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-03-10T00:00:00.0000ZGEOFON Station GEOFON Station Lhokseumave, Sulawesi-1.0418122.77176Sulawesifalsetrue2008-03-10T00:00:00.0000Z-1.0418122.771762008-03-10T00:00:00.0000Zxxxx1yyyy12012006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-03-10T00:00:00.0000Zxxxx2yyyy220129006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2008-03-10T00:00:00.0000Zxxxx0yyyy020120-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-11-09T00:00:00.0000ZGEOFON Station GEOFON Station Maumere, Flores-8.6357122.2376137Floresfalsetrue2006-11-09T00:00:00.0000Z-8.6357122.23761372006-11-09T00:00:00.0000Zxxxx1yyyy12013006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-11-09T00:00:00.0000Zxxxx2yyyy220139006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-11-09T00:00:00.0000Zxxxx0yyyy020130-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-07-27T00:00:00.0000ZGEOFON Station GEOFON Station Manna, Sumatra-4.3605102.9557154Sumatrafalsetrue2006-07-27T00:00:00.0000Z-4.3605102.95571542006-07-27T00:00:00.0000Zxxxx1yyyy12013006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-07-27T00:00:00.0000Zxxxx2yyyy220139006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-07-27T00:00:00.0000Zxxxx0yyyy020130-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2011-03-04T00:00:00.0000ZGEOFON Station Plampang, Sumbawa, 
Indonesia-8.8275117.7765125Indonesiafalsetrue2011-03-04T00:00:00.0000Z-8.8275117.77651252011-03-04T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2011-03-04T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2011-03-04T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-05-30T00:00:00.0000ZGEOFON Station Palembang, Sumatra-2.9024104.699325Sumatrafalsetrue2009-05-30T00:00:00.0000Z-2.9024104.6993252009-05-30T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-05-30T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-05-30T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-12-03T00:00:00.0000ZGEOFON Station Sanana, Moluccas, Indonesia-2.0496125.988130Indonesiafalsetrue2009-12-03T00:00:00.0000Z-2.0496125.9881302009-12-03T00:00:00.0000Zxxxx1yyyy12012006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-12-03T00:00:00.0000Zxxxx2yyyy220129006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-12-03T00:00:00.0000Zxxxx0yyyy020120-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-08-25T00:00:00.0000ZGEOFON Station Saumlaki, Tanimbar, Indonesia-7.9826131.2988110Indonesiafalsetrue2009-08-25T00:00:00.0000Z-7.9826131.29881102009-08-25T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-08-25T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2009-08-25T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-11-01T00:00:00.0000ZGEOFON Station GEOFON Station Semarang, Java-7.04915110.44067203Javafalsetrue2006-11-01T00:00:00.0000Z-7.04915110.440672032006-11-01T00:00:00.0000Zxxxx1yyyy12013006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-11-01T00:00:00.0000Zxxxx2yyyy220139006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-11-01T00:00:00.0000Zxxxx0yyyy020130-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-12-13T00:00:00.0000ZGEOFON Station GEOFON Station Soe, 
Timor-9.7553124.26721047Timorfalsetrue2006-12-13T00:00:00.0000Z-9.7553124.267210472006-12-13T00:00:00.0000Zxxxx1yyyy12010006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-12-13T00:00:00.0000Zxxxx2yyyy220109006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-12-13T00:00:00.0000Zxxxx0yyyy020100-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2007-01-05T00:00:00.0000ZGEOFON Station GEOFON Station Ternate, Indonesia0.7718127.366743Indonesiafalsetrue2007-01-05T00:00:00.0000Z0.7718127.3667432007-01-05T00:00:00.0000Zxxxx1yyyy12013006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2007-01-05T00:00:00.0000Zxxxx2yyyy220139006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2007-01-05T00:00:00.0000Zxxxx0yyyy020130-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2012-07-13T00:00:00.0000ZGEOFON Station Tolitoli, Sulawesi, Indonesia1.11119120.7817451Indonesiafalsetrue2012-07-13T00:00:00.0000Z1.11119120.78174512012-07-13T00:00:00.0000Zxxxx1yyyy12012006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2012-07-13T00:00:00.0000Zxxxx2yyyy220129006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra 
Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2012-07-13T00:00:00.0000Zxxxx0yyyy020120-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger1995-08-23T00:00:00.0000ZGEOFON Station Wanagama, Indonesia-7.9125110.5231350Indonesiafalsetrue2006-06-29T00:00:00.0000Z-7.9125110.52313502006-06-29T00:00:00.0000Zxxxx1yyyy12012006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-06-29T00:00:00.0000Zxxxx2yyyy220129006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2006-06-29T00:00:00.0000Zxxxx0yyyy020120-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger1993-09-10T00:00:00.0000Z(GSN) IRIS/USGS (IU),GEOFON(GE)Poseidon,PM Geo Obs-9.4047147.159790PNGfalsetrue2011-07-29T18:28:00.0000Z-9.4047147.1597902011-07-29T18:28:00.0000Zxxxx1yyyy12010358032252500000.02m/sfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor{"resourceId":"Datalogger/20190326190422.706373.1095"}FDSNXML:DataLogger2011-07-29T18:28:00.0000Zxxxx2yyyy2201088032667400000.02m/sfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor{"resourceId":"Datalogger/20190326190422.706598.1100"}FDSNXML:DataLogger2011-07-29T18:28:00.0000Zxxxx0yyyy020100-9041226400000.02m/sfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor{"resourceId":"Datalogger/20190326190422.706829.1105"}FDSNXML:DataLogger2023-01-11T00:00:00.0000Z-9.4047147.1597902023-01-11T00:00:00.0000Zxxxx1yyyy140101024478100000.02m/sfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor{"resourceId":"Datalogger/20230220174426.773094.1274"}FDSNXML:DataLogger2023-01-11T00:00:00.0000Zxxxx2yyyy2401091024478100000.02m/sfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor{"resourceId":"Datalogger/20230220174426.773519.1279"}FDSNXML:DataLogger2023-01-11T00:00:00.0000Zxxxx0yyyy040100-9024478100000.02m/sfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor{"resourceId":"Datalogger/20230220174426.774024.1284"}FDSNXML:DataLogger2015-05-18T00:00:00.0000Z-9.4047147.1597902015-05-18T00:00:00.0000Zxxxx1yyyy14010009244430000.05m/sfalse{"description":"Metrozet M2166 VBB 
seismometer"}FDSNXML:Sensor{"resourceId":"Datalogger/20190326190422.717873.1337"}FDSNXML:DataLogger2015-05-18T00:00:00.0000Zxxxx2yyyy2401090010012000000.05m/sfalse{"description":"Metrozet M2166 VBB seismometer"}FDSNXML:Sensor{"resourceId":"Datalogger/20190326190422.718116.1342"}FDSNXML:DataLogger2015-05-18T00:00:00.0000Zxxxx0yyyy040100-9010158800000.05m/sfalse{"description":"Metrozet M2166 VBB seismometer"}FDSNXML:Sensor{"resourceId":"Datalogger/20190326190422.718346.1347"}FDSNXML:DataLogger2010-04-15T00:00:00.0000ZKilima Mbogo, Kenya-1.127137.25251950Kenyafalsetrue{"id":1,"value":"Timing MAY be in error, as clock is unlocked."}FDSNXML:Comment/12001-10-05T08:00:00.0000Z{"id":2,"value":"Station Maintenance. Tests may be in progress."}FDSNXML:Comment/22002-02-28T10:10:00.0000Z{"id":3,"value":"Timing MAY be in error, as GPS system is not operating consistently."}FDSNXML:Comment/32005-05-24T14:28:00.0000Z{"id":4,"value":"Timing IS in error, as clock is unlocked."}FDSNXML:Comment/42005-05-24T14:28:00.0000Z{"id":5,"value":"Clock Unlocked."}FDSNXML:Comment/52012-05-30T00:00:00.0000Z2014-05-14T00:00:00.0000Z-1.127137.25249919302014-05-14T00:00:00.0000Zxxxx0yyyy0201200-9040332400000.05M/Sfalse{"id":1,"value":"Incorrect instrument response for low frequencies only"}FDSNXML:Comment/12003-01-14T00:00:00.0000Z{"id":2,"value":"Incorrect instrument response for low frequencies only"}FDSNXML:Comment/22004-04-13T18:43:00.0000Z{"id":3,"value":"Response may be invalid."}FDSNXML:Comment/32004-04-13T18:43:00.0000Z{"id":4,"value":"Seismometer mass is against stops."}FDSNXML:Comment/42005-05-15T07:25:00.0000Z{"id":5,"value":"Channel data quality is poor."}FDSNXML:Comment/52010-10-18T00:00:00.0000Z{"description":"Streckeisen STS-1VBB w/E300","resourceId":"Sensor/20190326190420.38691.1072"}FDSNXML:Sensor{"resourceId":"Datalogger/20190326190420.386881.1071"}FDSNXML:DataLogger2015-09-22T10:30:00.0000Z-1.127137.25249919502015-09-22T10:30:00.0000Zxxxx0yyyy0401100-9019746800000.02M/Sfalse{"id":1,"value":"Channel is down."}FDSNXML:Comment/12002-03-23T04:09:00.0000Z{"id":2,"value":"Channel is down."}FDSNXML:Comment/22006-04-08T02:51:00.0000Z{"id":3,"value":"Channel is down."}FDSNXML:Comment/32010-03-20T00:00:00.0000Z{"id":4,"value":"Channel data quality is poor."}FDSNXML:Comment/42010-10-18T00:00:00.0000Z{"description":"T120 post hole, quiet","resourceId":"Sensor/20190326190420.397169.1269"}FDSNXML:Sensor{"resourceId":"Datalogger/20190326190420.397141.1268"}FDSNXML:DataLogger2020-02-27T13:30:00.0000ZGEOFON/DIAS Station Belurgan, Co Louth, Ireland54.0279-6.336221Irelandfalsetrue2020-02-27T13:30:00.0000Z54.0279-6.3362212020-02-27T13:30:00.0000Zxxxx0yyyy020110-904720530001M/Sfalse{"resourceId":"Sensor/20210527151050.048617.36"}FDSNXML:Sensor{"resourceId":"Datalogger/20210527151050.046904.30"}FDSNXML:DataLogger2020-02-27T13:30:00.0000Zxxxx2yyyy220119004720530001M/Sfalse{"resourceId":"Sensor/20210527151050.050448.45"}FDSNXML:Sensor{"resourceId":"Datalogger/20210527151050.048857.39"}FDSNXML:DataLogger2020-02-27T13:30:00.0000Zxxxx1yyyy12011004720530001M/Sfalse{"resourceId":"Sensor/20210527151050.052358.54"}FDSNXML:Sensor{"resourceId":"Datalogger/20210527151050.050702.48"}FDSNXML:DataLogger2021-04-22T14:00:00.0000ZGEOFON/DIAS Station Dublin, 
Ireland53.2452-6.3762236Irelandfalsetrue2021-04-23T15:58:17.81Z53.2452-6.37622362021-04-23T15:58:17.81Zxxxx0yyyy020100-904720530001M/Sfalse{"resourceId":"Sensor/20210527151049.801138.36"}FDSNXML:Sensor{"resourceId":"Datalogger/20210527151049.799816.30"}FDSNXML:DataLogger2021-04-23T15:58:18.777Zxxxx1yyyy12010004720530001M/Sfalse{"resourceId":"Sensor/20210527151049.802608.45"}FDSNXML:Sensor{"resourceId":"Datalogger/20210527151049.801345.39"}FDSNXML:DataLogger2021-04-23T15:58:19.422Zxxxx2yyyy220109004720530001M/Sfalse{"resourceId":"Sensor/20210527151049.804539.54"}FDSNXML:Sensor{"resourceId":"Datalogger/20210527151049.802824.48"}FDSNXML:DataLogger2021-06-04T00:00:00.0000ZGEOFON/IMAA-CNR Station Tramutola, Italy40.2972415.80632933Italyfalsetrue2021-06-04T00:00:00.0000Z40.2972415.806329332021-06-04T00:00:00.0000Z2713111024412011006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2.5/g=1500","manufacturer":"Streckeisen","serialNumber":"110244","resourceId":"Sensor/20220203144236.657155.1173"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"2713","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2021-06-04T00:00:00.0000Z27132110244220119006291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2.5/g=1500","manufacturer":"Streckeisen","serialNumber":"110244","resourceId":"Sensor/20220203144236.657155.1173"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"2713","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2021-06-04T00:00:00.0000Z27130110244020110-906291450000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2.5/g=1500","manufacturer":"Streckeisen","serialNumber":"110244","resourceId":"Sensor/20220203144236.657155.1173"}FDSNXML:Sensor{"type":"Quanterra Q330","description":"GFZ:GE1993:Q330/N/g=419430","manufacturer":"Quanterra","model":"Q330","serialNumber":"2713","resourceId":"Datalogger/20190409123644.670096.98"}FDSNXML:DataLogger2019-06-14T00:00:00.0000ZStation Santorini, Greece36.36699225.475263288Greecefalsetrue2019-06-14T00:00:00.0000Z36.36699225.4752632882019-06-14T00:00:00.0000Zxxxx1yyyy12010005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2019-06-14T00:00:00.0000Zxxxx2yyyy220109005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2019-06-14T00:00:00.0000Zxxxx0yyyy020100-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2021-04-29T12:30:00.0000ZGEOFON/DIAS Station Valentia, 
Ireland51.9394-10.244214Irelandfalsetrue2021-05-11T14:33:21.696Z51.9394-10.2442142021-05-11T14:33:21.696Zxxxx0yyyy020100-904720530001M/Sfalse{"resourceId":"Sensor/20210527151050.315898.36"}FDSNXML:Sensor{"resourceId":"Datalogger/20210527151050.314154.30"}FDSNXML:DataLogger2021-05-11T14:33:22.851Zxxxx1yyyy12010004720530001M/Sfalse{"resourceId":"Sensor/20210527151050.317921.45"}FDSNXML:Sensor{"resourceId":"Datalogger/20210527151050.316138.39"}FDSNXML:DataLogger2021-05-11T14:33:24.122Zxxxx2yyyy220109004720530001M/Sfalse{"resourceId":"Sensor/20210527151050.319884.54"}FDSNXML:Sensor{"resourceId":"Datalogger/20210527151050.318193.48"}FDSNXML:DataLogger2017-07-05T10:00:00.0000ZGKKI/GEOFON Station Piszkes, Hungary47.9184000119.89440001940Hungaryfalsetrue2017-07-05T10:00:00.0000Z47.9184000119.894400019402017-07-05T10:00:00.0000Zxxxx0yyyy020120-905880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2017-07-05T10:00:00.0000Zxxxx1yyyy12012005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger2017-07-05T10:00:00.0000Zxxxx2yyyy220129005880000000.02M/Sfalse{"type":"VBB","description":"GFZ:GE1993:STS-2/N/g=1500","manufacturer":"Streckeisen","model":"STS-2/N","resourceId":"Sensor/20190409123644.652274.6"}FDSNXML:Sensor{"type":"EarthData PS6-SC","description":"GFZ:GE1993:PS6-SC/g=392000","manufacturer":"EarthData","model":"PS6-SC","resourceId":"Datalogger/20190409123644.659328.35"}FDSNXML:DataLogger diff --git a/etc/inventory/inventory_II.xml b/etc/inventory/inventory_II.xml new file mode 100644 index 0000000..6941675 --- /dev/null +++ b/etc/inventory/inventory_II.xml @@ -0,0 +1,2 @@ + +Streckeisen STS-1 Seismometer with Metrozet E300m/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-1 Seismometer with Metrozet E300m/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-1 Seismometer with Metrozet E300m/s{"unit":"Velocity in Meters Per Second"}Nanometrics Trillium 360 Seismometer - Vaultm/s{"unit":"Velocity in Meters Per Second"}Nanometrics Trillium 360 Seismometer - Vaultm/s{"unit":"Velocity in Meters Per Second"}Nanometrics Trillium 360 Seismometer - Vaultm/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-1 Seismometer with Metrozet E300m/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-1 Seismometer with Metrozet E300m/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-1 Seismometer with Metrozet E300m/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-1H/VBB Seismometerm/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-1V/VBB Seismometerm/s{"unit":"Velocity in Meters Per Second"}Streckeisen STS-1H/VBB Seismometerm/s{"unit":"Velocity in Meters Per Second"}Nanometrics Trillium 360 Seismometer - Boreholem/s{"unit":"Velocity in Meters Per Second"}Nanometrics Trillium 360 Seismometer - Boreholem/s{"unit":"Velocity in Meters Per Second"}Nanometrics Trillium 360 Seismometer - Boreholem/s{"unit":"Velocity in Meters Per Second"}Streckeisen 
[inventory XML, single line: sensor descriptions (Streckeisen STS-1 with Metrozet E300, STS-1H/STS-1V VBB, STS-2, STS-2.5, STS-5A, STS-6; Nanometrics Trillium 240 and Trillium 360 vault and borehole; Metrozet M2166; Geotech KS-54000 borehole), all measuring velocity in m/s, followed by FIR decimation stages and analogue poles-and-zeros (PAZ) response stages]
(-0.0242718,0)(-0.00196012,0.00188382) (-0.00196012,-0.00188382) (-0.0243226,0) (-0.0243226,0) (-6.35281,9.28915) (-6.35281,-9.28915)B2745.750.056.501380.0556(0,0) (0,0) (-18.75,0) (-0.0242718,0) (-0.0242718,0)(-0.00195228,0.00188171) (-0.00195228,-0.00188171) (-0.0243386,0) (-0.0243386,0) (-5.59044,9.51356) (-5.59044,-9.51356)B3454.140.056.108220.0556(0,0) (0,0) (-18.75,0) (-0.0242718,0) (-0.0242718,0)(-0.00195014,0.00189595) (-0.00195014,-0.00189595) (-0.0243374,0) (-0.0243374,0) (-5.93136,8.90064) (-5.93136,-8.90064)B20000.0523.9950.0567(0,0) (0,0) (-5.04597,0) (-75.7602,0) (-22.8292,0) (-493.38,0)(-0.00195649,0.00195999) (-0.00195649,-0.00195999) (-5.31418,0) (-22.6997,0) (-25.3758,43.5261) (-25.3758,-43.5261) (-337.408,0)B1136.160.050.0009487010.0587(0,0) (0,0) (-2.21314,0) (-2.21314,0) (-153,0) (-96.3,0) (-83,-153) (-83,153)(-0.00198017,0.00193492) (-0.00198017,-0.00193492) (-2.2496,0) (-2.2496,0) (-47.2855,-27.6) (-47.2855,27.6) (-136.728,0)B1146.120.050.001001190.0587(0,0) (0,0) (-2.22083,0) (-2.22083,0) (-153,0) (-96.3,0) (-83,-153) (-83,153)(-0.0018747,0.00201165) (-0.0018747,-0.00201165) (-2.25911,0) (-2.25911,0) (-46.1261,-27.6) (-46.1261,27.6) (-149.52,0)B1197.480.050.000965330.0587(0,0) (0,0) (-2.21454,0) (-2.21454,0) (-153,0) (-96.3,0) (-83,-153) (-83,153)(-0.00194383,0.00196213) (-0.00194383,-0.00196213) (-2.25159,0) (-2.25159,0) (-46.8882,-27.6) (-46.8882,27.6) (-140.826,0)B1180.80.050.0009275240.0587(0,0) (0,0) (-2.5,0) (-2.5,0) (-153,0) (-96.3,0) (-83,-153) (-83,153)(-0.00199792,0.00193372) (-0.00199792,-0.00193372) (-2.554,0) (-2.554,0) (-72.6404,-28.3263) (-72.6404,28.3263) (-65.2565,0)B1209.840.050.0009637720.0587(0,0) (0,0) (-2.5,0) (-2.5,0) (-153,0) (-96.3,0) (-83,-153) (-83,153)(-0.00195852,0.00196434) (-0.00195852,-0.00196434) (-2.554,0) (-2.554,0) (-70.478,-20.5339) (-70.478,20.5339) (-76.5001,0)B1162.20.050.0008114650.0587(0,0) (0,0) (-3.74999,0) (-3.74999,0) (-153,0) (-96.3,0) (-83,-153) (-83,153)(-0.00191603,0.00201164) (-0.00191603,-0.00201164) (-3.77808,0) (-3.77808,0) (-64.6981,-21.8973) (-64.6981,21.8973) (-76.5069,0)B11000.0540.05040.0567(0,0) (0,0) (-5.03,0) (-55.7043,0) (-25.4648,0) (-490.038,0)(-0.00188691,0.00189414) (-0.00188691,-0.00189414) (-5.1841,0) (-21.7,0) (-54,58.5) (-54,-58.5) (-196.397,0)B11000.0540.05170.0567(0,0) (0,0) (-5.03,0) (-55.7043,0) (-25.4648,0) (-490.038,0)(-0.00189931,0.00188456) (-0.00189931,-0.00188456) (-5.1841,0) (-21.7,0) (-54,58.5) (-54,-58.5) (-196.397,0)B11000.0540.05130.0567(0,0) (0,0) (-5.03,0) (-55.7043,0) (-25.4648,0) (-490.038,0)(-0.0018952,0.00188771) (-0.0018952,-0.00188771) (-5.1841,0) (-21.7,0) (-54,58.5) (-54,-58.5) (-196.397,0)B2889.910.05166.5970.0568(0,0) (0,0) (-6.91087,0) (-0.00571469,0) (-0.00571469,0) (-0.00595572,0)(-0.00167,0) (-0.00167,0) (-0.00479341,0.00419361) (-0.00479341,-0.00419361) (-5.20518,11.494) (-5.20518,-11.494) (-7.16857,0) (-0.0114148,0)B3340.020.055.30830.0556(0,0) (0,0) (-18.75,0) (-0.0360012,0) (-0.0360012,0)(-0.00190858,0.0018877) (-0.00190858,-0.0018877) (-0.0359413,0.00163072) (-0.0359413,-0.00163072) (-5.47617,8.34717) (-5.47617,-8.34717)B2914.470.05169.2290.0568(0,0) (0,0) (-5.01576,0) (-0.00388413,0) (-0.00388413,0) (-0.00421888,0)(-0.00149855,0) (-0.00149855,0) (-0.00295903,0.00322268) (-0.00295903,-0.00322268) (-5.16266,11.7379) (-5.16266,-11.7379) (-5.1505,0) (-0.00764496,0)B1093.40.0540.05030.0567(0,0) (0,0) (-5.03,0) (-55.7043,0) (-25.4648,0) (-490.038,0)(-0.00188715,0.00189467) (-0.00188715,-0.00189467) (-5.1841,0) (-21.7,0) (-54,58.5) (-54,-58.5) 
(-196.397,0)B1088.010.0540.0510.0567(0,0) (0,0) (-5.03,0) (-55.7043,0) (-25.4648,0) (-490.038,0)(-0.00189316,0.00188921) (-0.00189316,-0.00188921) (-5.1841,0) (-21.7,0) (-54,58.5) (-54,-58.5) (-196.397,0)B1099.450.0540.05080.0567(0,0) (0,0) (-5.03,0) (-55.7043,0) (-25.4648,0) (-490.038,0)(-0.00189117,0.00189099) (-0.00189117,-0.00189099) (-5.1841,0) (-21.7,0) (-54,58.5) (-54,-58.5) (-196.397,0)B2352.630.05313.4960.0525(0,0) (0,0)(-0.000758391,0) (-0.0113488,0) (-3.76393,4.28951) (-3.76393,-4.28951) (-9.38628,0)B2396.950.05312.9580.0525(0,0) (0,0)(-0.000759261,0) (-0.0113902,0) (-3.8158,4.29914) (-3.8158,-4.29914) (-9.23354,0)B2286.040.05310.9050.0525(0,0) (0,0)(-0.000761035,0) (-0.0115003,0) (-3.7628,4.23072) (-3.7628,-4.23072) (-9.45039,0)B1184.40.0512475.10.0557(0,0) (0,0) (-12.5314,0) (-25.4807,0) (-510.408,0)(-0.00281693,0.00280107) (-0.00281693,-0.00280107) (-16.437,0) (-22.081,34.122) (-22.081,-34.122) (-45.7932,269.778) (-45.7932,-269.778)B1163.040.0513398.50.0557(0,0) (0,0) (-12.642,0) (-25.4807,0) (-510.408,0)(-0.00281693,0.00280107) (-0.00281693,-0.00280107) (-16.3526,0) (-22.9987,35.5954) (-22.9987,-35.5954) (-49.3717,269.384) (-49.3717,-269.384)B1180.440.0513094.20.0557(0,0) (0,0) (-12.6242,0) (-25.4807,0) (-510.408,0)(-0.00281693,0.00280107) (-0.00281693,-0.00280107) (-16.4303,0) (-22.7676,35.1128) (-22.7676,-35.1128) (-49.9492,268.732) (-49.9492,-268.732)B2804.120.057.669990.0556(0,0) (0,0) (-12.5,0) (-0.0242718,0) (-0.0242718,0)(-0.00195798,0.00192548) (-0.00195798,-0.00192548) (-0.0242308,0.00188114) (-0.0242308,-0.00188114) (-6.08456,7.67978) (-6.08456,-7.67978)B2820.010.057.044470.0556(0,0) (0,0) (-12.5,0) (-0.0242718,0) (-0.0242718,0)(-0.00196203,0.00191298) (-0.00196203,-0.00191298) (-0.0242393,0.00108483) (-0.0242393,-0.00108483) (-5.85749,7.33523) (-5.85749,-7.33523)B3595.030.056.090160.0556(0,0) (0,0) (-12.5,0) (-0.0242718,0) (-0.0242718,0)(-0.00195615,0.00192368) (-0.00195615,-0.00192368) (-0.0242412,0) (-0.0242412,0) (-5.36711,6.88148) (-5.36711,-6.88148)B2591.380.056.843150.0556(0,0) (0,0) (-18.75,0) (-0.0242718,0) (-0.0242718,0)(-0.00191678,0.00189034) (-0.00191678,-0.00189034) (-0.0244278,0) (-0.0244278,0) (-6.18652,9.47191) (-6.18652,-9.47191)B2862.650.054.44140.0556(0,0) (0,0) (-28.125,0) (-0.0242718,0) (-0.0242718,0)(-0.00188317,0.00192122) (-0.00188317,-0.00192122) (-0.0242901,0) (-0.0242901,0) (-5.71637,9.60258) (-5.71637,-9.60258)B2089.320.058.037460.0556(0,0) (0,0) (-16.8798,0) (-0.0242718,0) (-0.0242718,0)(-0.00191014,0.00189993) (-0.00191014,-0.00189993) (-0.0243516,0) (-0.0243516,0) (-6.64766,9.55552) (-6.64766,-9.55552)B15000.050.001164590.0587(0,0) (0,0) (-2.5,0) (-2.5,0) (-153,0) (-96.3,0) (-83,-153) (-83,153)(-0.00589256,0.00589255) (-0.00589256,-0.00589255) (-2.554,0) (-2.554,0) (-54,-18.4) (-54,18.4) (-153,0)B19850.0521.7410.0567(0,0) (0,0) (-5.03803,0) (-81.3064,0) (-22.8292,0) (-493.38,0)(-0.00195649,0.00195999) (-0.00195649,-0.00195999) (-5.31418,0) (-23.1331,0) (-23.9958,42.9246) (-23.9958,-42.9246) (-337.408,0)B1975.80.0521.87370.0567(0,0) (0,0) (-5.03594,0) (-81.3948,0) (-22.8292,0) (-493.38,0)(-0.00195649,0.00195999) (-0.00195649,-0.00195999) (-5.31418,0) (-23.168,0) (-24.154,42.9842) (-24.154,-42.9842) (-337.408,0)B1975.80.0521.82850.0567(0,0) (0,0) (-5.03664,0) (-81.3721,0) (-22.8292,0) (-493.38,0)(-0.00195649,0.00195999) (-0.00195649,-0.00195999) (-5.31418,0) (-23.1566,0) (-24.1011,42.9653) (-24.1011,-42.9653) (-337.408,0)B1200.720.050.000873450.0587(0,0) (0,0) (-2.5,0) (-2.5,0) (-153,0) (-96.3,0) (-83,-153) 
(-83,153)(-0.00198717,0.00195432) (-0.00198717,-0.00195432) (-2.554,0) (-2.554,0) (-65.7419,-23.6838) (-65.7419,23.6838) (-76.5087,0)B1219.80.050.0008595320.0587(0,0) (0,0) (-2.5,0) (-2.5,0) (-153,0) (-96.3,0) (-83,-153) (-83,153)(-0.00188484,0.0020231) (-0.00188484,-0.0020231) (-2.554,0) (-2.554,0) (-64.4443,-25.5685) (-64.4443,25.5685) (-76.5021,0)B1253.520.050.0008687510.0587(0,0) (0,0) (-2.5,0) (-2.5,0) (-153,0) (-96.3,0) (-83,-153) (-83,153)(-0.00195215,0.00197846) (-0.00195215,-0.00197846) (-2.554,0) (-2.554,0) (-65.3056,-24.3439) (-65.3056,24.3439) (-76.5033,0)10132.6132.6167A-3.65342e-17 3.67488e-08 -4.2706e-07 1.14502e-06 -1.87594e-07 -3.37274e-07 2.78747e-06 -3.74403e-06 5.41172e-06 7.47336e-06 -0.000517759 0.000210677 4.63258e-05 -0.000608222 0.00144175 -0.00240627 0.00322534 -0.00350639 0.00281441 -0.000771971 -0.00280512 0.00777805 -0.0135815 0.0191765 -0.0229704 0.0240398 -0.0220986 0.00860734 0.0117525 -0.0447787 0.0964923 -0.191755 0.527652 0.724167 -0.156905 0.0442574 0.00314168 -0.0266714 0.0361532 -0.0385687 0.0310842 -0.0235259 0.0153211 -0.00740398 0.00109645 0.00309797 -0.0051932 0.00556131 -0.0047611 0.00338213 -0.00192052 0.000715218 7.67719e-05 -0.000451897 0.0005027 -0.000565037 -5.568e-05 1.57736e-05 -1.41985e-06 8.14909e-07 6.80795e-07 -1.25273e-06 1.52435e-06 -2.83336e-07 -1.06384e-08 1.25712e-09 -5.42954e-1110117.21817.21839A4.18952e-13 0.000330318 0.00102921 -0.00314123 0.000205709 0.00152521 -0.00623193 0.0104801 -0.0131202 0.0107821 -0.00144455 -0.0158729 0.0395074 -0.0651036 0.0853716 -0.0891913 0.0500619 0.837233 0.266723 -0.166693 0.095284 -0.0509218 0.0161458 0.00706362 -0.0183877 0.0199414 -0.0154895 0.00852735 -0.00255789 -0.00181103 0.00242649 -0.00375769 0.000467293 0.000633072 -1.56874e-06 -1.2548e-05 3.21041e-07 -2.63324e-08 -5.09997e-0810100060C-9.7911e-05 -8.31013e-05 -0.000110414 -0.000136129 -0.000156059 -0.000164941 -0.000156797 -0.000124876 -6.19389e-05 3.857e-05 0.000183125 0.000376479 0.000621579 0.000918587 0.00126453 0.00165263 0.00207198 0.00250753 0.00293989 0.00334597 0.0036995 0.00397216 0.0041347 0.00415859 0.00401767 0.00369003 0.00315997 0.00241977 0.0014714 0.000327844 -0.000985875 -0.00243229 -0.00396157 -0.00551224 -0.00701259 -0.00838275 -0.00953732 -0.0103886 -0.01085 -0.0108404 -0.0102875 -0.00913213 -0.00733187 -0.0048639 -0.00172774 0.00205315 0.00643065 0.0113322 0.0166621 0.022304 0.0281242 0.0339759 0.0397042 0.0451513 0.0501626 0.0545924 0.0583091 0.0612008 0.0631795 0.06418431986-01-01T00:00:00.0000ZGlobal Seismograph Network - IRIS/IDA (GSN)falsetrue{"type":"DOI","value":"10.7914/SN/II"}FDSNXML:Identifier/01990-10-12T00:00:00.0000ZAla Archa, Kyrgyzstan42.637574.49421633.1falsetrue2018-06-07T00:00:00.0000Z42.637574.49421633.12018-06-07T00:00:00.0000Zxxxx0yyyy0201300-9056670100000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2020-05-25T12:00:00.0000Zxxxx1yyyy1201304.8043799900000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2020-05-25T12:00:00.0000Zxxxx2yyyy22013095.5044852500000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2007-04-04T00:00:00.0000ZAmbohimpanompo, Madagascar-19.01847.2291528falsetrue2021-08-18T07:46:00.0000Z-19.01847.22915282021-08-18T07:46:00.0000Zxxxx1yyyy14015.3359.1033360600000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Vault"}FDSNXML:Sensor2021-08-18T07:46:00.0000Zxxxx2yyyy24015.388.8033347200000.05m/sCGfalse{"description":"Nanometrics 
Trillium 360 Seismometer - Vault"}FDSNXML:Sensor2021-08-18T07:46:00.0000Zxxxx0yyyy04015.30-9033427500000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Vault"}FDSNXML:Sensor1990-02-19T00:00:00.0000ZAlert, NU, Canada82.5033-62.3560falsetrue2021-11-07T18:05:00.0000Z82.5033-62.35602021-11-07T18:05:00.0000Zxxxx1yyyy12010358043977800000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2021-11-07T18:05:00.0000Zxxxx2yyyy2201087.5044764500000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2021-11-07T18:05:00.0000Zxxxx0yyyy020100-9042082200000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2018-09-09T00:00:00.0000ZArti, Russia56.387958.3849320falsetrue2018-09-09T00:00:00.0000Z56.387958.38493202018-09-09T00:00:00.0000Zxxxx2yyyy22014.590038287500000.05m/sCGfalse{"description":"Streckeisen STS-1H/VBB Seismometer"}FDSNXML:Sensor2018-09-09T00:00:00.0000Zxxxx0yyyy02014.50-9038730600000.05m/sCGfalse{"description":"Streckeisen STS-1V/VBB Seismometer"}FDSNXML:Sensor2023-03-20T12:30:00.0000Zxxxx1yyyy12014.50038762500000.05m/sCGfalse{"description":"Streckeisen STS-1H/VBB Seismometer"}FDSNXML:Sensor1994-10-01T00:00:00.0000ZButt Crater, Ascension Island-7.9327-14.3601173falsetrue2022-03-05T08:30:00.0000Z-7.9327-14.36011732022-03-05T08:30:00.0000Zxxxx1yyyy140124.3147.4018434800000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2022-03-05T08:30:00.0000Zxxxx2yyyy240124.3237.1018513800000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2022-03-05T08:30:00.0000Zxxxx0yyyy040124.30-9018250900000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor1996-05-29T00:00:00.0000ZBlack Forest Observatory, Schiltach, Germany48.33018.3296638falsetrue2019-12-03T20:00:00.0000Z48.33018.32966382019-12-03T20:00:00.0000Zxxxx2yyyy220116090040224900000.05m/sCGfalse{"description":"Streckeisen STS-1H/VBB Seismometer"}FDSNXML:Sensor2019-12-03T20:00:00.0000Zxxxx0yyyy02011600-9038302400000.05m/sCGfalse{"description":"Streckeisen STS-1V/VBB Seismometer"}FDSNXML:Sensor2022-09-22T15:26:00.0000Zxxxx1yyyy12011600040761000000.05m/sCGfalse{"description":"Streckeisen STS-1H/VBB Seismometer"}FDSNXML:Sensor1994-07-30T00:00:00.0000ZBorgarfjordur, Asbjarnarstadir, Iceland64.7474-21.3268110falsetrue2018-07-30T10:00:00.0000Z64.7474-21.32681102018-07-30T10:00:00.0000Zxxxx1yyyy140195249.9017853600000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2018-07-30T10:00:00.0000Zxxxx2yyyy240195339.6017826000000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2018-07-30T10:00:00.0000Zxxxx0yyyy0401950-9018390600000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2019-08-22T00:00:00.0000ZBurabay, Kazakhstan53.046170.3184334falsetrue2019-08-22T00:00:00.0000Z53.046170.31843342019-08-22T00:00:00.0000Zxxxx1yyyy140142.5323019862100000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2019-08-22T00:00:00.0000Zxxxx2yyyy240142.553019862100000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2019-08-22T00:00:00.0000Zxxxx0yyyy040142.50-9019862100000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor1996-03-10T00:00:00.0000ZCha de Macela, Sao Miguel Island, 
Azores37.7637-25.5243429falsetrue2019-06-07T00:00:00.0000Z37.7637-25.52434292019-06-07T00:00:00.0000Zxxxx1yyyy140188.4300018980200000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2019-06-07T00:00:00.0000Zxxxx2yyyy240188.431.1018527400000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2019-06-07T00:00:00.0000Zxxxx0yyyy040188.40-9019701200000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor1996-12-15T00:00:00.0000ZWest Island, Cocos (Keeling) Islands-12.190196.83491falsetrue2018-03-19T12:00:00.0000Z-12.190196.834912018-03-19T12:00:00.0000Zxxxx1yyyy140144.857.7018252700000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2018-03-19T12:00:00.0000Zxxxx2yyyy240144.8147.7018046700000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2018-03-19T12:00:00.0000Zxxxx0yyyy040144.80-9018372200000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2004-01-22T00:00:00.0000ZDiego Garcia, Chagos Islands, Indian Ocean-7.412172.45251falsetrue2022-05-29T06:00:00.0000Z-7.412172.452512022-05-29T06:00:00.0000Zxxxx1yyyy12012359.5032916700000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2022-05-29T06:00:00.0000Zxxxx2yyyy2201289.9031357900000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2022-05-29T06:00:00.0000Zxxxx0yyyy020120-9037081300000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor1996-02-16T00:00:00.0000ZMount Kent, East Falkland Island-51.6753-58.0637110falsetrue2018-02-26T20:30:00.0000Z-51.6753-58.06371102018-02-26T20:30:00.0000Zxxxx1yyyy140164320.4018390600000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2018-02-26T20:30:00.0000Zxxxx2yyyy24016450.3018390600000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2018-02-26T20:30:00.0000Zxxxx0yyyy0401640-9018390600000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor1990-05-21T00:00:00.0000ZErimo, Hokkaido Island, Japan42.015143.157240falsetrue2021-10-21T05:00:00.0000Z42.015143.1572402021-10-21T05:00:00.0000Zxxxx1yyyy12010359.9040130600000.05m/sCGfalse{"description":"Metrozet M2166-VBH Seismometer"}FDSNXML:Sensor2021-10-21T05:00:00.0000Zxxxx2yyyy2201087.8038654600000.05m/sCGfalse{"description":"Metrozet M2166-VBH Seismometer"}FDSNXML:Sensor2021-10-21T05:00:00.0000Zxxxx0yyyy020100-9039902400000.05m/sCGfalse{"description":"Metrozet M2166-VBV Seismometer"}FDSNXML:Sensor1987-11-13T00:00:00.0000ZEskdalemuir, Scotland, UK55.3167-3.205242falsetrue2017-01-29T00:00:00.0000Z55.3167-3.2052422017-01-29T00:00:00.0000Zxxxx2yyyy2201089.5042545500000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2017-01-29T00:00:00.0000Zxxxx0yyyy020100-9057041400000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2021-02-24T00:00:00.0000Zxxxx1yyyy120100.1043644700000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor1993-08-28T00:00:00.0000ZFlin Flon, Canada54.725-101.9783338falsetrue2021-09-27T13:35:00.0000Z54.725-101.97833382021-09-27T13:35:00.0000Zxxxx1yyyy140102.9033691600000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - 
Vault"}FDSNXML:Sensor2021-09-27T13:35:00.0000Zxxxx2yyyy2401092.9033069700000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Vault"}FDSNXML:Sensor2021-09-27T13:35:00.0000Zxxxx0yyyy040100-9033481000000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Vault"}FDSNXML:Sensor1996-12-10T00:00:00.0000ZHope Point, South Georgia Island-54.2836-36.487920falsetrue2017-03-10T00:00:00.0000Z-54.2836-36.4879202017-03-10T00:00:00.0000Zxxxx1yyyy12010359.8024822000000.05m/sCGfalse{"description":"Streckeisen STS-2 Seismometer"}FDSNXML:Sensor2017-03-10T00:00:00.0000Zxxxx2yyyy2201090.7024923400000.05m/sCGfalse{"description":"Streckeisen STS-2 Seismometer"}FDSNXML:Sensor2017-03-10T00:00:00.0000Zxxxx0yyyy020100-9025053200000.05m/sCGfalse{"description":"Streckeisen STS-2 Seismometer"}FDSNXML:Sensor2011-04-10T00:00:00.0000ZBlack Forest Observatory, Schiltach, Germany48.3319028.3311589falsetrue2011-04-10T00:00:00.0000Z48.3319028.33115892011-04-10T00:00:00.0000Zxxxx1yyyy140100-9090253000.05M/S**2CGfalse{"description":"Interferometric Streckeisen STS-1 Seismometer"}FDSNXML:Sensor2011-04-10T00:00:00.0000Zxxxx0yyyy040100-9090253000.05M/S**2CGfalse{"description":"Interferometric Streckeisen STS-1 Seismometer"}FDSNXML:Sensor1995-04-23T00:00:00.0000ZLas Juntas de Abangares, Costa Rica10.2908-84.9525340falsetrue2021-10-08T20:00:00.0000Z10.2908-84.95253402021-10-08T20:00:00.0000Zxxxx1yyyy1401032.5019653500000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2021-10-08T20:00:00.0000Zxxxx2yyyy24010122.5019514500000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2021-10-08T20:00:00.0000Zxxxx0yyyy040100-9019746900000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2023-03-27T00:00:00.0000ZJizzax, Uzbekistan40.092867.7118517.7falsetrue2023-03-27T00:00:00.0000Z40.092867.7118517.72023-03-27T00:00:00.0000Zxxxx1yyyy140134033049600000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Vault"}FDSNXML:Sensor2023-03-27T00:00:00.0000Zxxxx2yyyy2401394033116500000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Vault"}FDSNXML:Sensor2023-03-27T00:00:00.0000Zxxxx0yyyy040130-9033313800000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Vault"}FDSNXML:Sensor1999-02-06T00:00:00.0000ZKappang, Sulawesi, Indonesia-5.0142119.7517300falsetrue2018-11-13T07:00:00.0000Z-5.0142119.75173002018-11-13T07:00:00.0000Zxxxx1yyyy140161144.6017976800000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2018-11-13T07:00:00.0000Zxxxx2yyyy240161233.7018055900000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2018-11-13T07:00:00.0000Zxxxx0yyyy0401610-9018203000000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor1997-06-09T00:00:00.0000ZKodiak Island, Alaska, USA57.7828-152.5835152falsetrue2023-03-14T05:15:00.0000Z57.7828-152.58351522023-03-14T05:15:00.0000Zxxxx1yyyy140188155.2017965700000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2023-03-14T05:15:00.0000Zxxxx2yyyy240188245.3017886300000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2023-03-14T05:15:00.0000Zxxxx0yyyy0401880-9018237300000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor1994-02-01T00:00:00.0000ZKislovodsk, 
Russia43.955342.68631054falsetrue2022-12-23T12:00:00.0000Z43.955342.686310542022-12-23T12:00:00.0000Zxxxx1yyyy1201200025375200000.05m/sCGfalse{"description":"Streckeisen STS-1H/VBB Seismometer"}FDSNXML:Sensor2022-12-23T12:00:00.0000Zxxxx2yyyy22012090033194400000.05m/sCGfalse{"description":"Streckeisen STS-1H/VBB Seismometer"}FDSNXML:Sensor2022-12-23T12:00:00.0000Zxxxx0yyyy0201200-9030455300000.05m/sCGfalse{"description":"Streckeisen STS-1V/VBB Seismometer"}FDSNXML:Sensor1995-03-26T00:00:00.0000ZKurchatov, Kazakhstan50.715478.6202184falsetrue2019-09-01T12:00:00.0000Z50.715478.62021842019-09-01T12:00:00.0000Zxxxx1yyyy140141.1199019862100000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2019-09-01T12:00:00.0000Zxxxx2yyyy240141.1289019862100000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2019-09-01T12:00:00.0000Zxxxx0yyyy040141.10-9019862100000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2017-07-29T00:00:00.0000ZGagan, Kwajalein Atoll, Marshall Islands9.2873167.53690falsetrue2020-05-23T06:00:00.0000Z9.2873167.536902020-05-23T06:00:00.0000Zxxxx1yyyy14016295.5024584300000.05m/sCGfalse{"description":"Streckeisen STS-5A Seismometer"}FDSNXML:Sensor2020-05-23T06:00:00.0000Zxxxx2yyyy2401625.7024589300000.05m/sCGfalse{"description":"Streckeisen STS-5A Seismometer"}FDSNXML:Sensor2020-05-23T06:00:00.0000Zxxxx0yyyy040160-9024634000000.05m/sCGfalse{"description":"Streckeisen STS-5A Seismometer"}FDSNXML:Sensor1992-12-01T00:00:00.0000ZLovozero, Russia67.897934.6514630falsetrue2022-12-21T13:10:00.0000Z67.897934.65146302022-12-21T13:10:00.0000Zxxxx1yyyy12012000034156600000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2022-12-21T13:10:00.0000Zxxxx2yyyy220120090035349900000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2022-12-21T13:10:00.0000Zxxxx0yyyy02012000-9042460200000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor1999-09-22T00:00:00.0000ZMbarara, Uganda-0.601930.73821390falsetrue2019-09-22T00:00:00.0000Z-0.601930.738213902019-09-22T00:00:00.0000Zxxxx1yyyy140110017.5019862100000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2019-09-22T00:00:00.0000Zxxxx2yyyy2401100106.2019862100000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2019-09-22T00:00:00.0000Zxxxx0yyyy04011000-9019862100000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor1995-05-15T00:00:00.0000ZMahe, Seychelles-4.673755.4792475falsetrue2022-06-10T12:00:00.0000Z-4.673755.47924752022-06-10T12:00:00.0000Zxxxx1yyyy140191311.1019012300000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2022-06-10T12:00:00.0000Zxxxx2yyyy24019140.8018959400000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2022-06-10T12:00:00.0000Zxxxx0yyyy0401910-9019969400000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor1994-05-24T00:00:00.0000ZMonasavu, Fiji-17.7448178.0528801.1falsetrue2022-08-07T00:00:00.0000Z-17.7448178.0528801.12022-08-07T00:00:00.0000Zxxxx1yyyy140188.467.1019363600000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2022-08-07T00:00:00.0000Zxxxx2yyyy240188.4157.6019429100000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2022-08-07T00:00:00.0000Zxxxx0yyyy040188.40-9020090500000.05m/sCGfalse{"description":"Streckeisen STS-6 
Seismometer"}FDSNXML:Sensor1994-12-18T00:00:00.0000ZNilore, Pakistan33.650673.2686629falsetrue2021-06-13T00:00:00.0000Z33.650673.26866292021-06-13T00:00:00.0000Zxxxx1yyyy1201680039583600000.05m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2021-06-13T00:00:00.0000Zxxxx2yyyy22016890039173300000.05m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2021-06-13T00:00:00.0000Zxxxx0yyyy0201680-9018680400000.05m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor1988-06-22T00:00:00.0000ZNana, Peru-11.9875-76.8422575falsetrue2018-09-05T00:00:00.0000Z-11.9875-76.84225752018-09-05T00:00:00.0000Zxxxx1yyyy1201401.3043450400000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2018-09-05T00:00:00.0000Zxxxx2yyyy22014089.1043477200000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2018-09-05T00:00:00.0000Zxxxx0yyyy0201400-9054694100000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor1988-09-14T00:00:00.0000ZObninsk, Russia55.114636.5674160falsetrue2022-03-01T11:21:00.0000Z55.114636.56741602022-03-01T11:21:00.0000Zxxxx1yyyy1401300033437500000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Vault"}FDSNXML:Sensor2022-03-01T11:21:00.0000Zxxxx2yyyy24013090033437500000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Vault"}FDSNXML:Sensor2022-03-01T11:21:00.0000Zxxxx0yyyy0401300-9033437500000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Vault"}FDSNXML:Sensor2000-08-22T00:00:00.0000ZPallekele, Sri Lanka7.272880.7022460falsetrue2022-09-11T04:00:00.0000Z7.272880.70224602022-09-11T04:00:00.0000Zxxxx1yyyy14019080.2018805400000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2022-09-11T04:00:00.0000Zxxxx2yyyy240190170.2018970300000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2022-09-11T04:00:00.0000Zxxxx0yyyy0401900-9019820400000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2020-04-29T18:00:00.0000ZPinon Flat, California, USA33.6107-116.45441280falsetrue2022-02-08T18:50:00.0000Z33.6107-116.454412802022-02-08T18:50:00.0000Zxxxx1yyyy140194.5120.2019544300000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2022-02-08T18:50:00.0000Zxxxx0yyyy040194.50-9020025000000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor2022-05-28T11:00:00.0000Zxxxx2yyyy240194.5210.2019236400000.05m/sCGfalse{"description":"Streckeisen STS-6 Seismometer"}FDSNXML:Sensor1996-01-01T00:00:00.0000ZAr Rayn, Saudi Arabia23.522545.5032631falsetrue2018-01-26T03:00:00.0000Z23.522545.50326312018-01-26T03:00:00.0000Zxxxx1yyyy140195270.1018390600000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2018-01-26T03:00:00.0000Zxxxx2yyyy240195359.9018390600000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2018-01-26T03:00:00.0000Zxxxx0yyyy0401950-9018390600000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor1987-06-15T00:00:00.0000ZRapanui, Easter Island, Chile-27.1267-109.3344110falsetrue2019-04-24T00:00:00.0000Z-27.1267-109.33441102019-04-24T00:00:00.0000Zxxxx2yyyy2201090.5045760000000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet 
E300"}FDSNXML:Sensor2019-04-24T00:00:00.0000Zxxxx0yyyy020100-9052887000000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2020-07-01T12:00:00.0000Zxxxx1yyyy120100.1046148800000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2000-05-29T00:00:00.0000ZSantiago Island, Cape Verde14.9702-23.6085387falsetrue2021-12-01T15:00:00.0000Z14.9702-23.60853872021-12-01T15:00:00.0000Zxxxx1yyyy140195136018280300000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2021-12-01T15:00:00.0000Zxxxx2yyyy240195225.8018190200000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor2021-12-01T15:00:00.0000Zxxxx0yyyy0401950-9018381400000.05m/sCGfalse{"description":"Nanometrics Trillium 360 Seismometer - Borehole"}FDSNXML:Sensor1995-06-19T00:00:00.0000ZHorse Pasture, St. Helena Island-15.9594-5.7455537falsetrue2018-09-19T00:00:00.0000Z-15.9594-5.74555372018-09-19T00:00:00.0000Zxxxx1yyyy12016035039107000000.05m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2018-09-19T00:00:00.0000Zxxxx2yyyy220160125039843600000.05m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2018-09-19T00:00:00.0000Zxxxx0yyyy0201600-9038000000000.05m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2016-10-06T00:00:00.0000ZSimiganj, Tajikistan38.658569.00831400.5falsetrue2021-05-05T00:00:00.0000Z38.658569.00831400.52021-05-05T00:00:00.0000Zxxxx1yyyy14010224019801700000.05m/sCGfalse{"description":"Nanometrics Trillium 240 Seismometer"}FDSNXML:Sensor2021-05-05T00:00:00.0000Zxxxx2yyyy24010313.9019444600000.05m/sCGfalse{"description":"Nanometrics Trillium 240 Seismometer"}FDSNXML:Sensor2021-05-05T00:00:00.0000Zxxxx0yyyy040100-9019735500000.05m/sCGfalse{"description":"Nanometrics Trillium 240 Seismometer"}FDSNXML:Sensor1990-10-30T00:00:00.0000ZSutherland, South Africa-32.379720.81171770falsetrue2020-08-12T15:00:00.0000Z-32.379720.811717702020-08-12T15:00:00.0000Zxxxx1yyyy12010358.6044401500000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2020-08-12T15:00:00.0000Zxxxx2yyyy2201087.4044653100000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2020-08-12T15:00:00.0000Zxxxx0yyyy020100-9056925100000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor1994-01-17T00:00:00.0000ZHobart, Tasmania, Australia-42.9099147.3204132falsetrue2018-09-12T12:00:00.0000Z-42.9099147.32041322018-09-12T12:00:00.0000Zxxxx0yyyy020100-9041009900000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2021-02-27T00:00:00.0000Zxxxx1yyyy12010356.3045325300000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor2021-02-27T00:00:00.0000Zxxxx2yyyy2201087.1033070800000.05m/sCGfalse{"description":"Streckeisen STS-1 Seismometer with Metrozet E300"}FDSNXML:Sensor1990-10-12T00:00:00.0000ZTalaya, Russia51.6807103.6438579falsetrue2022-02-24T18:14:01.0000Z51.6807103.64385792022-02-24T18:14:01.0000Zxxxx1yyyy1401200024827600000.05m/sCGfalse{"description":"Streckeisen STS-2.5 Seismometer"}FDSNXML:Sensor2022-02-24T18:14:01.0000Zxxxx2yyyy24012090024827600000.05m/sCGfalse{"description":"Streckeisen STS-2.5 Seismometer"}FDSNXML:Sensor2022-02-24T18:14:01.0000Zxxxx0yyyy0401200-9024827600000.05m/sCGfalse{"description":"Streckeisen STS-2.5 
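Because the inventory files in this commit are committed as opaque SCML payloads, a quick read-back check after checkout can confirm they still parse. The following is a minimal sketch, assuming the seiscomp-python bindings available to this installation; the path etc/inventory/inventory_II.xml and the helper name summarize are illustrative assumptions, not files or tools introduced by this commit.

#!/usr/bin/env seiscomp-python
# Illustrative sketch only -- not part of this commit. Loads a committed
# inventory file and prints each network code with its station-epoch count.
# The file name below is an assumption based on the II network metadata
# summarized above; adjust it to whatever is present under etc/inventory.

import sys

import seiscomp.datamodel
import seiscomp.io


def summarize(path):
    ar = seiscomp.io.XMLArchive()
    if not ar.open(path):
        sys.stderr.write("Failed to open %s\n" % path)
        return 1

    obj = ar.readObject()   # root object stored in the SCML archive
    ar.close()

    inv = seiscomp.datamodel.Inventory.Cast(obj)
    if inv is None:
        sys.stderr.write("No inventory object found in %s\n" % path)
        return 1

    for i in range(inv.networkCount()):
        net = inv.network(i)
        print("%s: %d station epoch(s)" % (net.code(), net.stationCount()))
    return 0


if __name__ == "__main__":
    sys.exit(summarize("etc/inventory/inventory_II.xml"))

Run against each file under etc/inventory, this would be expected to print one line per network (for example II or IU) with its station-epoch count; a failure to cast to Inventory would point to a truncated or malformed payload.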
diff --git a/etc/inventory/inventory_IU.xml b/etc/inventory/inventory_IU.xml
new file mode 100644
index 0000000..72720df
--- /dev/null
+++ b/etc/inventory/inventory_IU.xml
@@ -0,0 +1,2 @@
+
+[Flattened inventory payload for network IU -- XML markup lost in extraction; only a summary is recoverable. The content repeats per-channel sensor descriptions (Streckeisen STS-1VBB w/E300, STS-1/VBB, STS-2 standard- and high-gain, STS-2.5, STS-6A VBB, Geotech KS-54000 borehole, Nanometrics Trillium 360, Trillium T360 surface, T120 post hole) each with unit m/s ("Velocity in Meters Per Second"), followed by ResponseFIR references stamped 20230413102652.9 with gains 34729.9 and 47944.111 and decimation identifiers 16777200/4194300; the remainder of the payload is truncated in this excerpt.]
sponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.934729.916777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.11116777200ResponseFIR/20230413102652.947944.111A11830.025.67858e-200.0287(0,0) (0,0) (-4.1617,0) (-4.1617,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0123583,-0.0122235) 
(-0.0123583,0.0122235) (-4.28585,0) (-4.28585,0) (-360.272,0) (-480.718,-481.311) (-480.718,481.311)100A11830.025.5326e-200.0287(0,0) (0,0) (-4.18367,0) (-4.18367,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0124509,-0.0123729) (-0.0124509,0.0123729) (-4.30409,0) (-4.30409,0) (-354.218,0) (-487.528,-471.006) (-487.528,471.006)100A11830.025.51178e-200.0287(0,0) (0,0) (-4.15782,0) (-4.15782,0) (-4957930,0) (-0.648766,-7735670) (-0.648766,7735670) (-10615700,0)(-0.0123105,-0.0124009) (-0.0123105,0.0124009) (-4.30268,0) (-4.30268,0) (-353.203,0) (-484.23,-473.792) (-484.23,473.792)100A11770.028.46652e+170.02611(0,0) (0,0) (-31.63,0) (-160,0) (-350,0) (-3177,0)(-0.0368205,-0.0362649) (-0.0368205,0.0362649) (-32.55,0) (-142,0) (-364,-404) (-364,404) (-1260,0) (-4900,-5200) (-4900,5200) (-7100,-1700) (-7100,1700)100A11830.025.63486e-200.0287(0,0) (0,0) (-4.15754,0) (-4.15754,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0123555,-0.012281) (-0.0123555,0.012281) (-4.29034,0) (-4.29034,0) (-359.037,0) (-478.017,-480.029) (-478.017,480.029)100A19840.025.41001e+170.02611(0,0) (0,0) (-30.6662,0) (-140.772,0) (-324.384,0) (-2905.72,0)(-0.0121527,-0.0122348) (-0.0121527,0.0122348) (-32.0919,0) (-122.206,0) (-202.041,-286.293) (-202.041,286.293) (-1961.97,0) (-6700,-1300) (-6700,1300) (-4900,-5100) (-4900,5100)100A20020.0210449.70.0224(0,0) (0,0)(-0.0125277,-0.0112601) (-0.0125277,0.0112601) (-39.6745,-94.0963) (-39.6745,94.0963)100A19960.0211199.70.0224(0,0) (0,0)(-0.01244,-0.0116978) (-0.01244,0.0116978) (-39.7194,-98.0179) (-39.7194,98.0179)100A25080.029800.380.0224(0,0) (0,0)(-0.0123041,-0.0118102) (-0.0123041,0.0118102) (-40.4949,-90.2857) (-40.4949,90.2857)100A11830.025.64757e-200.0287(0,0) (0,0) (-4.09993,0) (-4.09993,0) (-5009300,0) (-0.660995,-7671910) (-0.660995,7671910) (-10607300,0)(-0.0124861,-0.012271) (-0.0124861,0.012271) (-4.25952,0) (-4.25952,0) (-348.445,0) (-470.034,-498.473) (-470.034,498.473)100A11880.025.64757e-200.0287(0,0) (0,0) (-4.09993,0) (-4.09993,0) (-5009300,0) (-0.660995,-7671910) (-0.660995,7671910) (-10607300,0)(-0.0124861,-0.012271) (-0.0124861,0.012271) (-4.25952,0) (-4.25952,0) (-348.445,0) (-470.034,-498.473) (-470.034,498.473)100A14590.021.15638e-200.0287(0,0) (0,0) (-12.3022,0) (-12.3022,0) (-6570210,0) (-42.2607,-10313100) (-42.2607,10313100) (-14285300,0)(-0.0370415,-0.0370331) (-0.0370415,0.0370331) (-12.6844,0) (-12.6844,0) (-290.705,0) (-427.381,-430.518) (-427.381,430.518)100A11830.025.57001e-200.0287(0,0) (0,0) (-4.16178,0) (-4.16178,0) (-4978020,0) (-0.648851,-7705960) (-0.648851,7705960) (-10569100,0)(-0.0123547,-0.012392) (-0.0123547,0.012392) (-4.28962,0) (-4.28962,0) (-356.898,0) (-480.023,-478.026) (-480.023,478.026)100A11830.025.56976e-200.0287(0,0) (0,0) (-4.16028,0) (-4.16028,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0124131,-0.0123031) (-0.0124131,0.0123031) (-4.28972,0) (-4.28972,0) (-354.203,0) (-483.554,-476.135) (-483.554,476.135)100A11830.024.78876e-200.0287(0,0) (0,0) (-4.57351,0) (-4.57351,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0123726,-0.0122094) (-0.0123726,0.0122094) (-4.73696,0) (-4.73696,0) (-322.111,0) (-412.947,-510.844) (-412.947,510.844)100A14590.023.82199e+150.02611(0,0) (0,0) (-8.78827,0) (-147.756,0) (-457.258,-470.233) (-457.258,470.233)(-0.0368108,-0.0368158) (-0.0368108,0.0368158) (-8.98704,0) (-251.093,0) (-370.227,0) (-96.1869,-399.665) (-96.1869,399.665) (-526.246,0) (-1057.12,-1006.07) 
(-1057.12,1006.07) (-13300,0)100A11830.025.53135e-200.0287(0,0) (0,0) (-4.18651,0) (-4.18651,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0123702,-0.0122802) (-0.0123702,0.0122802) (-4.33651,0) (-4.33651,0) (-350.634,0) (-493.385,-463.034) (-493.385,463.034)100A14590.021.69861e+150.02611(0,0) (0,0) (-8.93881,0) (-236.889,0) (-403.815,-474.131) (-403.815,474.131)(-0.0369225,-0.0367614) (-0.0369225,0.0367614) (-9.50505,0) (-212.02,0) (-255.693,0) (-405.329,0) (-94.8516,-433.467) (-94.8516,433.467) (-1213.18,-1019.93) (-1213.18,1019.93) (-13300,0)100A20160.026860.230.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0129199,-0.0105736) (-0.0129199,0.0105736) (-0.0183613,0) (-0.0336159,0) (-35.9231,-74.0749) (-35.9231,74.0749)100A20090.026820.560.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0120809,-0.0116364) (-0.0120809,0.0116364) (-0.0195644,0) (-0.0313307,0) (-35.9231,-74.0749) (-35.9231,74.0749)100A24590.026814.690.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.016382,0) (-0.0125283,-0.0123668) (-0.0125283,0.0123668) (-0.0329362,0) (-35.9231,-74.0749) (-35.9231,74.0749)100A33330.025538.130.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0124656,-0.0117285) (-0.0124656,0.0117285) (-0.0203059,0) (-0.0272654,0) (-33.8373,-66.2576) (-33.8373,66.2576)100A27530.026813.970.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0133345,0) (-0.0116711,-0.0136013) (-0.0116711,0.0136013) (-0.0442244,0) (-36.7897,-72.7725) (-36.7897,72.7725)100A25240.026789.810.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0125273,-0.0117876) (-0.0125273,0.0117876) (-0.020271,0) (-0.0275303,0) (-32.9417,-75.4857) (-32.9417,75.4857)100A11830.025.07706e-200.0287(0,0) (0,0) (-4.85651,0) (-4.85651,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0123839,-0.0122778) (-0.0123839,0.0122778) (-5.03438,0) (-5.03438,0) (-318.902,0) (-399.277,-549.447) (-399.277,549.447)100A11830.025.50944e-200.0287(0,0) (0,0) (-4.19497,0) (-4.19497,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0125014,-0.0122927) (-0.0125014,0.0122927) (-4.34381,0) (-4.34381,0) (-347.783,0) (-500.146,-458.071) (-500.146,458.071)100A10900.028.57527e+170.02510(0,0) (0,0) (-160,0) (-350,0) (-3079,0)(-0.0121181,-0.0117984) (-0.0121181,0.0117984) (-146,0) (-360,-405) (-360,405) (-1234,0) (-4900,-5214.6) (-4900,5214.6) (-7200,-1647) (-7200,1647)100A1962.30.0210866.40.0224(0,0) (0,0)(-0.0122498,-0.0116576) (-0.0122498,0.0116576) (-37.268,-97.2936) (-37.268,97.2936)100A1954.50.0210670.60.0224(0,0) (0,0)(-0.0123635,-0.0115995) (-0.0123635,0.0115995) (-37.2822,-96.263) (-37.2822,96.263)100A2509.50.028395.180.0224(0,0) (0,0)(-0.0119453,-0.0120139) (-0.0119453,0.0120139) (-38.5104,-83.1362) (-38.5104,83.1362)100A19690.029090.120.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0122807,-0.0117482) (-0.0122807,0.0117482) (-0.0210095,0) (-0.027718,0) (-34.738,-88.692) (-34.738,88.692)100A19750.0210697.80.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0180542,0) (-0.0134109,-0.0122844) (-0.0134109,0.0122844) (-0.0267367,0) (-34.4227,-97.6563) (-34.4227,97.6563)100A31850.026145.120.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0128141,-0.0118582) (-0.0128141,0.0118582) (-0.0208714,0) (-0.0256395,0) (-33.9018,-70.7223) (-33.9018,70.7223)100A14590.023.53936e+150.02611(0,0) (0,0) (-8.59502,0) (-159.005,0) (-463.1,-463.1) (-463.1,463.1)(-0.037,-0.037) (-0.037,0.037) (-8.96456,0) (-250.919,0) (-374.8,0) (-92.0285,-396.113) (-92.0285,396.113) (-520.3,0) (-1053,-1005) (-1053,1005) 
(-13300,0)100A19840.025.72255e+170.02611(0,0) (0,0) (-29.6349,0) (-142.146,0) (-319.932,0) (-2868.44,0)(-0.0123325,-0.0122229) (-0.0123325,0.0122229) (-31.2465,0) (-124.151,0) (-217.536,-283.443) (-217.536,283.443) (-1916.56,0) (-6700,-1300) (-6700,1300) (-4900,-5100) (-4900,5100)100A27510.026476.520.0224(0,0) (0,0)(-0.0122728,-0.0117124) (-0.0122728,0.0117124) (-33.8285,-72.9766) (-33.8285,72.9766)100A27470.027076.610.0224(0,0) (0,0)(-0.0121938,-0.0117676) (-0.0121938,0.0117676) (-33.3644,-77.1861) (-33.3644,77.1861)100A35400.025996.550.0224(0,0) (0,0)(-0.012264,-0.0118239) (-0.012264,0.0118239) (-38.5751,-67.108) (-38.5751,67.108)100A20070.0210049.50.0224(0,0) (0,0)(-0.0124406,-0.0116944) (-0.0124406,0.0116944) (-36.92,-93.1303) (-36.92,93.1303)100A25460.028685.330.0224(0,0) (0,0)(-0.0120444,-0.0120435) (-0.0120444,0.0120435) (-37.3118,-85.3914) (-37.3118,85.3914)100A19850.0210468.90.0224(0,0) (0,0)(-0.0124129,-0.0116091) (-0.0124129,0.0116091) (-36.5807,-95.4788) (-36.5807,95.4788)100A11830.025.59462e-200.0287(0,0) (0,0) (-4.14736,0) (-4.14736,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0123717,-0.0123107) (-0.0123717,0.0123107) (-4.30208,0) (-4.30208,0) (-355.847,0) (-483.928,-469.915) (-483.928,469.915)100A24040.029016.680.0224(0,0) (0,0)(-0.0123146,-0.0114659) (-0.0123146,0.0114659) (-36.0806,-87.7607) (-36.0806,87.7607)100A27180.025425.250.0224(0,0) (0,0)(-0.0127508,-0.0105642) (-0.0127508,0.0105642) (-37.3278,-63.3534) (-37.3278,63.3534)100A26210.025212.410.0224(0,0) (0,0)(-0.00935174,-4.59347e-05) (-0.00935174,4.59347e-05) (-36.0953,-62.2964) (-36.0953,62.2964)100A14590.023.54348e+170.02611(0,0) (0,0) (-15.15,0) (-176.6,0) (-463.1,-430.5) (-463.1,430.5)(-0.0371143,-0.0367) (-0.0371143,0.0367) (-15.64,0) (-255.097,0) (-374.8,0) (-97.34,-400.7) (-97.34,400.7) (-520.3,0) (-13300,0) (-10530,-10050) (-10530,10050)100A11830.025.5559e-200.0287(0,0) (0,0) (-4.18093,0) (-4.18093,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0123857,-0.0122805) (-0.0123857,0.0122805) (-4.33552,0) (-4.33552,0) (-352.257,0) (-491.139,-464.224) (-491.139,464.224)100A26090.025935.80.0224(0,0) (0,0)(-0.0124847,-0.0116423) (-0.0124847,0.0116423) (-38.9012,-66.437) (-38.9012,66.437)100A26000.026226.570.0224(0,0) (0,0)(-0.0123504,-0.0117978) (-0.0123504,0.0117978) (-38.8306,-68.6473) (-38.8306,68.6473)100A32530.025660.060.0224(0,0) (0,0)(-0.0123979,-0.0117471) (-0.0123979,0.0117471) (-39.1797,-64.175) (-39.1797,64.175)100A26480.025240.690.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0165847,0) (-0.0122706,-0.0124808) (-0.0122706,0.0124808) (-0.0341837,0) (-35.0417,-63.0342) (-35.0417,63.0342)100A25800.025408.30.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0122499,-0.0120903) (-0.0122499,0.0120903) (-0.0182008,0) (-0.0317911,0) (-34.9469,-64.4904) (-34.9469,64.4904)100A31420.025255.260.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0110237,-0.0113277) (-0.0110237,0.0113277) (-0.0237183,0) (-0.0311233,0) (-35.0417,-63.0342) (-35.0417,63.0342)100A19940.0211936.50.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0107135,-0.00822254) (-0.0107135,0.00822254) (-0.0183411,0) (-0.0417065,0) (-37.6024,-100.891) (-37.6024,100.891)100A19450.0211700.10.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0107977,-0.0102305) (-0.0107977,0.0102305) (-0.0215564,0) (-0.0315032,0) (-37.6024,-100.891) (-37.6024,100.891)100A23170.0210213.60.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0187876,0) (-0.0190447,0) (-0.0148672,-0.0135206) (-0.0148672,0.0135206) 
(-37.6998,-94.3943) (-37.6998,94.3943)100A2767.480.053951.310.0546(0,0) (0,0) (-0.0378489,0) (-0.0378489,0)(-0.0181146,0) (-0.0157481,-0.01234) (-0.0157481,0.01234) (-0.0503,0) (-39.18,-49.12) (-39.18,49.12)100A2742.390.053951.320.0546(0,0) (0,0) (-0.0368894,0) (-0.0368894,0)(-0.0174233,0) (-0.0158742,-0.01234) (-0.0158742,0.01234) (-0.0490149,0) (-39.18,-49.12) (-39.18,49.12)100A3580.540.053949.210.0546(0,0) (0,0) (-0.0135709,0) (-0.0135709,0)(-0.00773287,0) (-0.0126464,-0.01234) (-0.0126464,0.01234) (-0.0190196,0) (-39.18,-49.12) (-39.18,49.12)100A14590.023.54692e+170.02611(0,0) (0,0) (-15.15,0) (-176.6,0) (-463.1,-430.5) (-463.1,430.5)(-0.0372963,-0.0367) (-0.0372963,0.0367) (-15.64,0) (-255.097,0) (-374.8,0) (-97.34,-400.7) (-97.34,400.7) (-520.3,0) (-13300,0) (-10530,-10050) (-10530,10050)100A11830.025.59942e-200.0287(0,0) (0,0) (-4.18131,0) (-4.18131,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0123607,-0.0121577) (-0.0123607,0.0121577) (-4.35447,0) (-4.35447,0) (-354.516,0) (-487.994,-463.977) (-487.994,463.977)100A194480.025.81747e+120.0259(0,0) (0,0) (-15.15,0) (-318.6,-401.2) (-318.6,401.2)(-0.0365757,-0.03708) (-0.0365757,0.03708) (-15.99,0) (-187.239,0) (-100.9,-401.9) (-100.9,401.9) (-417.1,0) (-7454,-7142) (-7454,7142)100A27130.026395.370.0224(0,0) (0,0)(-0.0121331,-0.0117019) (-0.0121331,0.0117019) (-33.6462,-72.5129) (-33.6462,72.5129)100A24770.029848.070.0224(0,0) (0,0)(-0.0123744,-0.0114168) (-0.0123744,0.0114168) (-37.3751,-91.8446) (-37.3751,91.8446)100A26350.025596.960.0224(0,0) (0,0)(-0.013085,-0.00901346) (-0.013085,0.00901346) (-27.551,-69.3224) (-27.551,69.3224)100A11830.025.62673e-200.0287(0,0) (0,0) (-4.17306,0) (-4.17306,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0123476,-0.0123238) (-0.0123476,0.0123238) (-4.32784,0) (-4.32784,0) (-355.557,0) (-484.407,-472.819) (-484.407,472.819)100A1922.380.026248.850.0224(0,0) (0,0)(-0.0124509,-0.0115219) (-0.0124509,0.0115219) (-36.4093,-70.0956) (-36.4093,70.0956)100A1947.110.026894.120.0224(0,0) (0,0)(-0.0126727,-0.0112779) (-0.0126727,0.0112779) (-36.0256,-74.7035) (-36.0256,74.7035)100A2457.260.028700.470.0224(0,0) (0,0)(-0.0124712,-0.0114189) (-0.0124712,0.0114189) (-35.2188,-86.2837) (-35.2188,86.2837)100A20730.028630.590.0224(0,0) (0,0)(-0.0123254,-0.0117527) (-0.0123254,0.0117527) (-39.7105,-83.9327) (-39.7105,83.9327)100A28660.026574.230.0224(0,0) (0,0)(-0.0123108,-0.0118063) (-0.0123108,0.0118063) (-34.9659,-73.1126) (-34.9659,73.1126)100A24840.029165.820.0224(0,0) (0,0)(-0.0122681,-0.0117766) (-0.0122681,0.0117766) (-39.4061,-87.2044) (-39.4061,87.2044)100A11830.025.40039e-200.0287(0,0) (0,0) (-4.11431,0) (-4.11431,0) (-4963050,0) (-0.648851,-7755830) (-0.648851,7755830) (-10661800,0)(-0.0124127,-0.012246) (-0.0124127,0.012246) (-4.25511,0) (-4.25511,0) (-349.074,0) (-487.679,-471.563) (-487.679,471.563)100A14590.0245.68010.0267(0,0) (0,0) (-9.42478,0) (-628.319,0) (-565.487,-979.452) (-565.487,979.452)(-0.0373128,-0.0367) (-0.0373128,0.0367) (-9.73894,0) (-219.911,-138.23) (-219.911,138.23) (-219.911,-684.867) (-219.911,684.867)100A18860.0286374.60.0225(0,0) (0,0)(-0.0048004,0) (-0.0741905,0) (-22.7121,-27.1065) (-22.7121,27.1065) (-59.4313,0)100A19880.0286591.30.0225(0,0) (0,0)(-0.0048004,0) (-0.0749082,0) (-22.7121,-27.1065) (-22.7121,27.1065) (-59.4313,0)100A19270.0286032.20.0225(0,0) (0,0)(-0.0048004,0) (-0.073046,0) (-22.7121,-27.1065) (-22.7121,27.1065) (-59.4313,0)100A14590.0245.55270.0267(0,0) (0,0) (-9.42478,0) (-628.319,0) (-565.487,-979.452) 
(-565.487,979.452)(-0.037,-0.037) (-0.037,0.037) (-9.73894,0) (-219.911,-138.23) (-219.911,138.23) (-219.911,-684.867) (-219.911,684.867)100A11830.025.40446e-200.0287(0,0) (0,0) (-4.1654,0) (-4.1654,0) (-4963010,0) (-0.648848,-7752660) (-0.648848,7752660) (-10639900,0)(-0.012411,-0.0121727) (-0.012411,0.0121727) (-4.3,0) (-4.3,0) (-349.159,0) (-490.671,-469.042) (-490.671,469.042)100A20380.0288768.20.0225(0,0) (0,0)(-0.0048004,0) (-0.081859,0) (-22.7121,-27.1065) (-22.7121,27.1065) (-59.4313,0)100A20560.0286916.10.0225(0,0) (0,0)(-0.0048004,0) (-0.0759741,0) (-22.7121,-27.1065) (-22.7121,27.1065) (-59.4313,0)100A20410.0287156.20.0225(0,0) (0,0)(-0.0048004,0) (-0.0767555,0) (-22.7121,-27.1065) (-22.7121,27.1065) (-59.4313,0)100A11830.025.72001e-200.0287(0,0) (0,0) (-4.11431,0) (-4.11431,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0124697,-0.0122578) (-0.0124697,0.0122578) (-4.26609,0) (-4.26609,0) (-335.558,0) (-467.126,-524.789) (-467.126,524.789)100A20350.0286207.60.0225(0,0) (0,0)(-0.0048004,0) (-0.0736339,0) (-22.7121,-27.1065) (-22.7121,27.1065) (-59.4313,0)100A20620.02864970.0225(0,0) (0,0)(-0.0048004,0) (-0.0745966,0) (-22.7121,-27.1065) (-22.7121,27.1065) (-59.4313,0)100A19410.0286514.40.0225(0,0) (0,0)(-0.0048004,0) (-0.0746539,0) (-22.7121,-27.1065) (-22.7121,27.1065) (-59.4313,0)100A14590.021.14907e-200.0287(0,0) (0,0) (-12.2649,0) (-12.2649,0) (-6570210,0) (-42.2607,-10313100) (-42.2607,10313100) (-14285300,0)(-0.0373557,-0.0366344) (-0.0373557,0.0366344) (-12.615,0) (-12.615,0) (-264.862,0) (-428.117,-467.724) (-428.117,467.724)100A194480.023.55136e+170.02611(0,0) (0,0) (-15.15,0) (-176.6,0) (-463.1,-430.5) (-463.1,430.5)(-0.0371852,-0.0362) (-0.0371852,0.0362) (-15.64,0) (-255.097,0) (-374.8,0) (-97.34,-400.7) (-97.34,400.7) (-520.3,0) (-13300,0) (-10530,-10050) (-10530,10050)100A20180.027435.390.0224(0,0) (0,0)(-0.0126618,-0.0116162) (-0.0126618,0.0116162) (-42.1485,-75.1378) (-42.1485,75.1378)100A27520.025752.070.0224(0,0) (0,0)(-0.0122876,-0.0118516) (-0.0122876,0.0118516) (-36.7393,-66.3136) (-36.7393,66.3136)100A24230.026628.590.0224(0,0) (0,0)(-0.0120794,-0.0121685) (-0.0120794,0.0121685) (-41.6216,-69.9714) (-41.6216,69.9714)100A10900.028.57165e+170.02510(0,0) (0,0) (-160,0) (-350,0) (-3079,0)(-0.0119066,-0.0118625) (-0.0119066,0.0118625) (-146,0) (-360,-405) (-360,405) (-1234,0) (-4900,-5214.6) (-4900,5214.6) (-7200,-1647) (-7200,1647)100A194480.023.54491e+170.02611(0,0) (0,0) (-15.15,0) (-176.6,0) (-463.1,-430.5) (-463.1,430.5)(-0.0371901,-0.0367) (-0.0371901,0.0367) (-15.64,0) (-255.097,0) (-374.8,0) (-97.34,-400.7) (-97.34,400.7) (-520.3,0) (-13300,0) (-10530,-10050) (-10530,10050)100A11830.025.56241e-200.0287(0,0) (0,0) (-4.1269,0) (-4.1269,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0123944,-0.0122528) (-0.0123944,0.0122528) (-4.28739,0) (-4.28739,0) (-347.682,0) (-476.117,-484.621) (-476.117,484.621)100A11830.025.76827e-200.0287(0,0) (0,0) (-4.06979,0) (-4.06979,0) (-4995020,0) (-0.761891,-7633020) (-0.761891,7633020) (-10497100,0)(-0.012538,-0.0121025) (-0.012538,0.0121025) (-4.21681,0) (-4.21681,0) (-338.439,0) (-467.832,-515.504) (-467.832,515.504)100A2081.520.0286918.20.0225(0,0) (0,0)(-0.0048,0) (-0.0772019,0) (-22.9288,-26.9261) (-22.9288,26.9261) (-59.1685,0)100A2050.710.0294134.10.0225(0,0) (0,0)(-0.0048,0) (-0.0772121,0) (-27.2008,-28.5262) (-27.2008,28.5262) (-51.586,0)100A2004.360.0293637.10.0225(0,0) (0,0)(-0.0048,0) (-0.0766217,0) (-28.0014,-29.4513) (-28.0014,29.4513) 
(-48.375,0)100A14590.021.16691e-200.0287(0,0) (0,0) (-12.346,0) (-12.346,0) (-6570210,0) (-42.2607,-10313100) (-42.2607,10313100) (-14285300,0)(-0.0370098,-0.0370024) (-0.0370098,0.0370024) (-12.6913,0) (-12.6913,0) (-305.533,0) (-417.708,-425.443) (-417.708,425.443)100A21750.053947.840.0524(0,0) (0,0)(-0.01234,-0.01234) (-0.01234,0.01234) (-39.18,-49.12) (-39.18,49.12)100A21350.053947.840.0524(0,0) (0,0)(-0.01234,-0.01234) (-0.01234,0.01234) (-39.18,-49.12) (-39.18,49.12)100A23980.053947.840.0524(0,0) (0,0)(-0.01234,-0.01234) (-0.01234,0.01234) (-39.18,-49.12) (-39.18,49.12)100A14590.021.1389e-200.0287(0,0) (0,0) (-12.3599,0) (-12.3599,0) (-6570210,0) (-42.2607,-10313100) (-42.2607,10313100) (-14285300,0)(-0.0370066,-0.0371122) (-0.0370066,0.0371122) (-12.6638,0) (-12.6638,0) (-291.59,0) (-427.844,-427.844) (-427.844,427.844)100A11770.028.46366e+170.02611(0,0) (0,0) (-31.63,0) (-160,0) (-350,0) (-3177,0)(-0.0367564,-0.0362649) (-0.0367564,0.0362649) (-32.55,0) (-142,0) (-364,-404) (-364,404) (-1260,0) (-4900,-5200) (-4900,5200) (-7100,-1700) (-7100,1700)100A19560.028098.870.0224(0,0) (0,0)(-0.0125008,-0.0116928) (-0.0125008,0.0116928) (-36.3947,-82.2369) (-36.3947,82.2369)100A19590.027756.420.0224(0,0) (0,0)(-0.0124521,-0.0117804) (-0.0124521,0.0117804) (-36.2287,-80.2159) (-36.2287,80.2159)100A2457.90.027702.710.0224(0,0) (0,0)(-0.0123816,-0.0117844) (-0.0123816,0.0117844) (-35.9263,-80.023) (-35.9263,80.023)100A11830.025.52413e-200.0287(0,0) (0,0) (-4.19368,0) (-4.19368,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0123581,-0.0123065) (-0.0123581,0.0123065) (-4.34355,0) (-4.34355,0) (-347.423,0) (-491.705,-468.821) (-491.705,468.821)100A20240.021218.930.0224(0,0) (0,0)(-0.0109449,-0.0131164) (-0.0109449,0.0131164) (-34.9554,-0.935745) (-34.9554,0.935745)100A19910.028867.160.0224(0,0) (0,0)(-0.012309,-0.0118499) (-0.012309,0.0118499) (-39.1148,-85.6123) (-39.1148,85.6123)100A24860.027251.690.0224(0,0) (0,0)(-0.0122004,-0.0118719) (-0.0122004,0.0118719) (-39.1652,-75.584) (-39.1652,75.584)100A19800.022.0183e+160.02510(0,0) (0,0) (-180.707,0) (-1406.65,0) (-2745.28,0)(-0.0123298,-0.0122478) (-0.0123298,0.0122478) (-140.468,0) (-304.361,0) (-213.254,-269.123) (-213.254,269.123) (-4900,-5214.6) (-4900,5214.6) (-7200,-1647) (-7200,1647)100A19880.022.0183e+160.02510(0,0) (0,0) (-180.707,0) (-1406.65,0) (-2745.28,0)(-0.0123298,-0.0122478) (-0.0123298,0.0122478) (-140.468,0) (-304.361,0) (-213.254,-269.123) (-213.254,269.123) (-4900,-5214.6) (-4900,5214.6) (-7200,-1647) (-7200,1647)100A19840.022.0183e+160.02510(0,0) (0,0) (-180.707,0) (-1406.65,0) (-2745.28,0)(-0.0123298,-0.0122478) (-0.0123298,0.0122478) (-140.468,0) (-304.361,0) (-213.254,-269.123) (-213.254,269.123) (-4900,-5214.6) (-4900,5214.6) (-7200,-1647) (-7200,1647)100A11830.025.4919e-200.0287(0,0) (0,0) (-4.19394,0) (-4.19394,0) (-4967370,0) (-0.648851,-7716220) (-0.648851,7716220) (-10531300,0)(-0.0123506,-0.0123236) (-0.0123506,0.0123236) (-4.34607,0) (-4.34607,0) (-347.796,0) (-496.286,-460.028) (-496.286,460.028)100A24470.053950.40.0546(0,0) (0,0) (-0.0243363,0) (-0.0243363,0)(-0.0124762,-0.01222) (-0.0124762,0.01222) (-0.0219936,0) (-0.0285787,0) (-39.18,-49.12) (-39.18,49.12)100A19340.0215587.50.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.00202008,0) (-0.0156266,-0.010174) (-0.0156266,0.010174) (-0.112489,0) (-40.2456,-101.574) (-40.2456,101.574)100A18870.0212297.30.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.00792808,0) (-0.0117379,-0.0103951) (-0.0117379,0.0103951) (-0.0737261,0) 
(-39.7608,-96.8381) (-39.7608,96.8381)100A26030.023534.480.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0129535,-0.0124299) (-0.0129535,0.0124299) (-0.0203314,0) (-0.0262853,0) (-38.4798,-45.3674) (-38.4798,45.3674)100A18190.024328.090.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.0138902,-0.0106088) (-0.0138902,0.0106088) (-0.0201903,0) (-0.0281366,0) (-38.4729,-53.13) (-38.4729,53.13)100A21320.023792.680.0246(0,0) (0,0) (-0.0242718,0) (-0.0242718,0)(-0.00231045,-0.0168023) (-0.00231045,0.0168023) (-0.0238366,0) (-0.0251998,0) (-38.2699,-48.9166) (-38.2699,48.9166)10010117.218417.218439A4.18952e-13 0.000330318 0.00102921 -0.00314123 0.000205709 0.00152521 -0.00623193 0.0104801 -0.0131202 0.0107821 -0.00144455 -0.0158729 0.0395074 -0.0651036 0.0853716 -0.0891913 0.0500619 0.837233 0.266723 -0.166693 0.095284 -0.0509218 0.0161458 0.00706362 -0.0183877 0.0199414 -0.0154895 0.00852735 -0.00255789 -0.00181103 0.00242649 -0.00375769 0.000467293 0.000633072 -1.56874e-06 -1.2548e-05 3.21041e-07 -2.63324e-08 -5.09997e-0810132.6132.6167A-3.65342e-17 3.67488e-08 -4.2706e-07 1.14502e-06 -1.87594e-07 -3.37274e-07 2.78747e-06 -3.74403e-06 5.41172e-06 7.47336e-06 -0.000517759 0.000210677 4.63258e-05 -0.000608222 0.00144175 -0.00240627 0.00322534 -0.00350639 0.00281441 -0.000771971 -0.00280512 0.00777805 -0.0135815 0.0191765 -0.0229704 0.0240398 -0.0220986 0.00860734 0.0117525 -0.0447787 0.0964923 -0.191755 0.527652 0.724167 -0.156905 0.0442574 0.00314168 -0.0266714 0.0361532 -0.0385687 0.0310842 -0.0235259 0.0153211 -0.00740398 0.00109645 0.00309797 -0.0051932 0.00556131 -0.0047611 0.00338213 -0.00192052 0.000715218 7.67719e-05 -0.000451897 0.0005027 -0.000565037 -5.568e-05 1.57736e-05 -1.41985e-06 8.14909e-07 6.80795e-07 -1.25273e-06 1.52435e-06 -2.83336e-07 -1.06384e-08 1.25712e-09 -5.42954e-111988-01-01T00:00:00.0000ZGlobal Seismograph Network - IRIS/USGS (GSN)falsetrue{"type":"DOI","value":"10.7914/SN/IU"}FDSNXML:Identifier/02009-07-19T00:00:00.0000Z(GSN) Global Seismograph Network (IRIS/USGS)51.8823-176.6842130falsetrue2018-09-05T02:00:00.0000Z51.8823-176.68421292018-09-05T02:00:00.0000Zxxxx1yyyy140111019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-09-05T02:00:00.0000Zxxxx2yyyy2401191019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-09-05T02:00:00.0000Zxxxx0yyyy040110-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor1993-08-24T00:00:00.0000Z(GSN) IRIS/USGS (IU) and Samoa Met. 
Div.-13.9085-171.7827706falsetrue2018-03-26T00:00:00.0000Z-13.9085-171.78277052018-03-26T00:00:00.0000Zxxxx1yyyy140110019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-03-26T00:00:00.0000Zxxxx2yyyy2401190019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-03-26T00:00:00.0000Zxxxx0yyyy040110-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2002-11-19T21:07:00.0000Z(GSN) IRIS/USGS (IU) and ANSS34.94591-106.45721820falsetrue2018-07-09T20:45:00.0000Z34.94591-106.45721632.72018-07-09T20:45:00.0000Zxxxx1yyyy1401188156019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-07-09T20:45:00.0000Zxxxx2yyyy2401188246019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-07-09T20:45:00.0000Zxxxx0yyyy04011880-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2002-05-30T00:00:00.0000Z(GSN) IRIS/USGS (IU), DEPREM, and METU39.86832.79341090falsetrue2016-06-22T00:00:00.0000Z39.86832.79349152016-06-22T00:00:00.0000Zxxxx1yyyy1401175196019746800000.02m/sCGfalse{"description":"T120 post hole, quiet"}FDSNXML:Sensor2016-06-22T00:00:00.0000Zxxxx2yyyy2401175286019746800000.02m/sCGfalse{"description":"T120 post hole, quiet"}FDSNXML:Sensor2016-06-22T00:00:00.0000Zxxxx0yyyy04011750-9019746800000.02m/sCGfalse{"description":"T120 post hole, quiet"}FDSNXML:Sensor2008-08-01T00:00:00.0000Z(GSN) IRIS/USGS (IU)32.371299-64.69629930falsetrue2018-07-31T00:00:00.0000Z32.371299-64.696299-12018-07-31T00:00:00.0000Zxxxx1yyyy140131287019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-07-31T00:00:00.0000Zxxxx2yyyy24013117019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-07-31T00:00:00.0000Zxxxx0yyyy0401310-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor1995-08-16T00:00:00.0000Z(GSN) IRIS/USGS (IU) and GSRAS68.0653166.4531320falsetrue2022-11-21T07:45:00.0000Z68.0653166.45313182022-11-21T07:45:00.0000Zxxxx1yyyy140122033286100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor2022-11-21T07:45:00.0000Zxxxx2yyyy2401292033286100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor2022-11-21T07:45:00.0000Zxxxx0yyyy040120-9033286100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor1996-02-19T00:00:00.0000Z(GSN) IRIS/USGS (IU) and ANARE-66.2792110.535410falsetrue2012-01-20T00:00:00.0000Z-66.2792110.535452012-01-20T00:00:00.0000Zxxxx1yyyy120150033588200000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2012-01-20T00:00:00.0000Zxxxx2yyyy2201590033487600000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2012-01-20T00:00:00.0000Zxxxx0yyyy020150-9042077700000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor1989-07-28T00:00:00.0000Z(GSN) IRIS/USGS (IU), St Louis Univ., and ANSS38.0557-91.2446222falsetrue2018-06-09T18:00:00.0000Z38.0557-91.24461712018-06-09T18:00:00.0000Zxxxx1yyyy1401519019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-06-09T18:00:00.0000Zxxxx2yyyy24015199019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-06-09T18:00:00.0000Zxxxx0yyyy0401510-9019931400000.02m/sCGfalse{"description":"Streckeisen STS-6A 
VBB Seismometer"}FDSNXML:Sensor2009-09-19T14:35:27.0000Z(GSN) IRIS/USGS (IU) and Thai Met. Dept.18.814198.9443420falsetrue2023-01-30T15:21:00.0000Z18.814198.94434202023-01-30T15:21:00.0000Zxxxx1yyyy140100024478100000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2023-01-30T15:21:00.0000Zxxxx2yyyy2401090024478100000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2023-01-30T15:21:00.0000Zxxxx0yyyy040100-9024478100000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2009-07-09T00:00:00.0000Z(GSN) IRIS/USGS (IU), Univ. of Alaska, and ANSS64.873599-147.8616200falsetrue2018-08-30T00:00:00.0000Z64.873599-147.8616832018-08-30T00:00:00.0000Zxxxx1yyyy140111741019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-08-30T00:00:00.0000Zxxxx2yyyy2401117131019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-08-30T00:00:00.0000Zxxxx0yyyy04011170-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor1989-10-26T00:00:00.0000Z(GSN) IRIS/USGS (IU), Oregon State Univ., and ANSS44.5855-123.3046110falsetrue2020-10-27T18:30:00.0000Z44.5855-123.3046-402020-10-27T18:30:00.0000Zxxxx1yyyy1401150130019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2020-10-27T18:30:00.0000Zxxxx2yyyy2401150220019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2020-10-27T18:30:00.0000Zxxxx0yyyy04011500-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2019-05-17T00:00:00.0000Z(GSN) IRIS/USGS (IU) and Geoscience Australia-20.08765146.24998367falsetrue2019-05-17T00:00:00.0000Z-20.08765146.249983662019-05-17T00:00:00.0000Zxxxx1yyyy14011356019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2019-05-17T00:00:00.0000Zxxxx2yyyy2401186019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2019-05-17T00:00:00.0000Zxxxx0yyyy040110-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2011-07-27T00:00:00.0000Z(GSN) IRIS/USGS (IU) and PHIVOLCS7.0697125.5791151falsetrue2015-09-04T06:00:00.0000Z7.0697125.57911502015-09-04T06:00:00.0000Zxxxx1yyyy140110024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor2015-09-04T06:00:00.0000Zxxxx2yyyy2401190024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor2015-09-04T06:00:00.0000Zxxxx0yyyy040110-9024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor1998-08-02T00:00:00.0000Z(GSN) IRIS/USGS (IU) and ANSS28.1103-81.432730falsetrue2022-02-28T00:00:00.0000Z28.1103-81.4327-1322022-02-28T00:00:00.0000Zxxxx1yyyy1401162327019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2022-02-28T00:00:00.0000Zxxxx2yyyy240116257019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2022-02-28T00:00:00.0000Zxxxx0yyyy04011620-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2004-01-21T20:24:00.0000Z(GSN) IRIS/USGS (IU), NIED, Tuvalu Met Serv-8.5259179.196620falsetrue2021-12-24T00:00:00.0000Z-8.5259179.1966202021-12-24T00:00:00.0000Zxxxx1yyyy14010359024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 
Standard-gain"}FDSNXML:Sensor2021-12-24T00:00:00.0000Zxxxx2yyyy2401089024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor2021-12-24T00:00:00.0000Zxxxx0yyyy040100-9024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor1997-09-01T00:00:00.0000Z(GSN) IRIS/USGS (IU), MEDNET, Addis Ababa Univ8.895238.67982570falsetrue2013-09-18T08:40:00.0000Z8.895238.679825652013-09-18T08:40:00.0000Zxxxx1yyyy12015180033823200000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2013-09-18T08:40:00.0000Zxxxx2yyyy22015270033705800000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2013-09-18T08:40:00.0000Zxxxx0yyyy0201509041255600000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor1991-07-10T00:00:00.0000Z(GSN) IRIS/USGS (IU) and WSSP40.14844.7411609falsetrue2014-03-19T18:16:59.0000Z40.14844.74115092014-03-19T18:16:59.0000Zxxxx0yyyy02011000-9055919000000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2014-04-05T00:00:00.0000Zxxxx1yyyy12011000046188200000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2014-04-05T00:00:00.0000Zxxxx2yyyy220110090042346100000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2015-01-19T00:00:00.0000Z(GSN) IRIS/USGS (IU) and GFZ49.690911.2203384falsetrue2019-11-20T17:00:00.0000Z49.690911.22032852019-11-20T17:00:00.0000Zxxxx1yyyy14019933404961880000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2019-11-20T17:00:00.0000Zxxxx2yyyy2401996404961880000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2019-11-20T17:00:00.0000Zxxxx0yyyy0401990-904961880000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2010-01-12T00:00:00.0000Z(GSN) IRIS/USGS (IU)13.5893144.8684170falsetrue2021-11-09T00:00:00.0000Z13.5893144.8684772021-11-09T00:00:00.0000Zxxxx1yyyy140193309019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2021-11-09T00:00:00.0000Zxxxx2yyyy24019339019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2021-11-09T00:00:00.0000Zxxxx0yyyy0401930-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-01-26T16:50:00.0000Z(GSN) IRIS/USGS (IU), UT Austin, and ANSS29.96478-95.8381269falsetrue2018-01-26T16:50:00.0000Z29.96478-95.83812-242018-01-26T16:50:00.0000Zxxxx1yyyy140193285019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-01-26T16:50:00.0000Zxxxx2yyyy24019315019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-01-26T16:50:00.0000Zxxxx0yyyy0401930-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor1993-10-22T00:00:00.0000Z(GSN) IRIS/USGS (IU), Solomon Is. 
Min Mines & Ener-9.4387159.9475100falsetrue2020-01-24T23:00:00.0000Z-9.4387159.9475282020-01-24T23:00:00.0000Zxxxx1yyyy140172278018287200000.02m/sCGfalse{"description":"Trillium 360"}FDSNXML:Sensor2020-01-24T23:00:00.0000Zxxxx2yyyy2401728018287200000.02m/sCGfalse{"description":"Trillium 360"}FDSNXML:Sensor2020-01-24T23:00:00.0000Zxxxx0yyyy0401720-9018287200000.02m/sCGfalse{"description":"Trillium 360"}FDSNXML:Sensor2008-09-22T00:00:00.0000Z(GSN) IRIS/USGS, Harvard University42.5064-71.5583200falsetrue2011-07-25T21:11:30.0000Z42.5064-71.55832002011-07-25T21:11:30.0000Zxxxx1yyyy120100032922100000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2014-01-16T20:32:00.0000Zxxxx2yyyy2201090032791300000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2016-10-14T16:00:00.0000Zxxxx0yyyy020100-9042102800000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2013-11-21T00:00:00.0000Z(GSN) IRIS/USGS (IU) and Korea Met. Adminstration37.47768126.6243655falsetrue2013-11-22T13:18:00.0000Z37.47768126.6243652.52013-11-22T13:18:00.0000Zxxxx1yyyy12012.50033034700000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2013-11-22T13:18:00.0000Zxxxx2yyyy22012.590033135300000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2013-11-22T13:18:00.0000Zxxxx0yyyy02012.50-9053436000000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor1998-07-24T00:00:00.0000Z(GSN) IRIS/USGS (IU)16.7329-169.52922falsetrue2021-05-29T00:00:00.0000Z16.7329-169.52922.52021-05-29T00:00:00.0000Zxxxx1yyyy140100024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor2021-05-29T00:00:00.0000Zxxxx2yyyy2401090024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor2021-05-29T00:00:00.0000Zxxxx0yyyy040100-9024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor1994-11-05T00:00:00.0000Z(GSN) IRIS/USGS (IU), GEOFON (GE), AWI, and UB78.915411.938590falsetrue2022-10-25T14:00:00.0000Z78.915411.9385872022-10-25T14:00:00.0000Zxxxx1yyyy140130033286100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor2022-10-25T14:00:00.0000Zxxxx2yyyy2401390033286100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor2022-10-25T14:00:00.0000Zxxxx0yyyy040130-9033286100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor1993-06-07T00:00:00.0000Z(GSN) IRIS/USGS (IU) and Univ. Helsinki69.756527.0035100falsetrue2012-06-18T00:00:00.0000Z69.756527.0035852012-06-18T00:00:00.0000Zxxxx1yyyy120115354046154600000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2012-06-18T00:00:00.0000Zxxxx2yyyy22011584046087500000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2012-06-18T00:00:00.0000Zxxxx0yyyy0201150-9059392000000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2017-11-07T00:00:00.0000Z(GSN) IRIS/USGS (IU) and Nat. Acad. Sci. 
Ukraine50.701229.2242180falsetrue2017-11-07T00:00:00.0000Z50.701229.22421802017-11-07T00:00:00.0000Zxxxx1yyyy120101033672300000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2017-11-07T00:00:00.0000Zxxxx0yyyy020100-9042715200000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2018-03-31T10:00:00.0000Zxxxx2yyyy2201091033303200000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor1988-08-15T00:00:00.0000Z(GSN) IRIS/USGS (IU), GEOSCOPE (G), and NOAA21.42-158.0112110falsetrue2022-08-08T20:00:00.0000Z21.42-158.0112772022-08-08T20:00:00.0000Zxxxx1yyyy140133312019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2022-08-08T20:00:00.0000Zxxxx2yyyy24013342019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2022-08-08T20:00:00.0000Zxxxx0yyyy0401330-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor1995-09-17T00:00:00.0000Z(GSN) IRIS/USGS (IU), GEOFON (GE), & Univ. Nairobi-1.127137.25251950falsetrue2014-05-14T00:00:00.0000Z-1.127137.252519302014-05-14T00:00:00.0000Zxxxx0yyyy0201200-9040332800000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2021-10-10T12:00:00.0000Zxxxx1yyyy1201200045600900000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2021-10-10T12:00:00.0000Zxxxx2yyyy22012090043973500000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2007-12-04T00:00:00.0000Z(GSN) IRIS/USGS (IU), NIED, Kiribati Weather Serv-2.7744-171.718620falsetrue2011-04-25T15:00:00.0000Z-2.7744-171.7186182011-04-25T15:00:00.0000Zxxxx1yyyy140120024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor2011-04-25T15:00:00.0000Zxxxx2yyyy2401290024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor2016-06-06T09:00:00.0000Zxxxx0yyyy040120-9024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor2022-10-31T10:00:00.0000Z(GSN) IRIS/USGS (IU) and Univ. Bergen59.65219.5946558falsetrue2022-10-31T10:00:00.0000Z59.65219.59462162022-10-31T10:00:00.0000Zxxxx1yyyy14013420019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2022-10-31T10:00:00.0000Zxxxx2yyyy240134290019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2022-10-31T10:00:00.0000Zxxxx0yyyy04013420-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2014-08-04T00:00:00.0000Z(GSN) IRIS/USGS (IU),CIW,Fund Andes,Las Camp. 
Obs.-29.011-70.70052274falsetrue2014-08-04T00:00:00.0000Z-29.011-70.700522742014-08-04T00:00:00.0000Zxxxx1yyyy120100043772200000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2014-08-04T00:00:00.0000Zxxxx2yyyy2201090043621200000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2014-08-04T00:00:00.0000Zxxxx0yyyy020100-9054576900000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor1994-08-05T00:00:00.0000Z(GSN) IRIS/USGS (IU) and Zambia Geological Survey-15.277928.18821200falsetrue2015-09-13T00:00:00.0000Z-15.277928.188212002015-09-13T00:00:00.0000Zxxxx1yyyy120100044426500000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2015-09-13T00:00:00.0000Zxxxx2yyyy2201090043285600000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2015-09-13T00:00:00.0000Zxxxx0yyyy020100-9052714500000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2010-07-20T00:00:00.0000Z(GSN) IRIS/USGS (IU) and GSRAS59.5756150.77337falsetrue2014-09-29T00:00:00.0000Z59.5756150.773352014-09-29T00:00:00.0000Zxxxx1yyyy120120033454200000.02m/sCGfalse{"description":"Streckeisen STS-1/VBB Seismometer"}FDSNXML:Sensor2014-09-29T00:00:00.0000Zxxxx2yyyy2201290032632000000.02m/sCGfalse{"description":"Streckeisen STS-1/VBB Seismometer"}FDSNXML:Sensor2014-09-29T00:00:00.0000Zxxxx0yyyy020120-9038873100000.02m/sCGfalse{"description":"Streckeisen STS-1/VBB Seismometer"}FDSNXML:Sensor2008-08-01T00:00:00.0000Z(GSN) IRIS/USGS (IU) and IGN28.2502-16.5081991674falsetrue2022-12-20T13:30:00.0000Z28.2502-16.50819916742022-12-20T13:30:00.0000Zxxxx1yyyy14010351033286100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor2022-12-20T13:30:00.0000Zxxxx2yyyy2401081033286100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor2022-12-20T13:30:00.0000Zxxxx0yyyy040100-9033286100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor1990-08-18T00:00:00.0000Z(GSN) IRIS/USGS (IU) and JMA36.54567138.20406405falsetrue2012-08-03T18:54:00.0000Z36.54567138.204064052012-08-03T18:54:00.0000Zxxxx1yyyy12010357046433800000.05m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2012-08-03T18:54:00.0000Zxxxx2yyyy2201087046012800000.05m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2014-06-16T17:17:00.0000Zxxxx0yyyy020100-9060075600000.05m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2010-09-10T00:00:00.0000Z(GSN) IRIS/USGS (IU) and NNC Kazakhstan46.80881.977600falsetrue2016-12-13T05:00:00.0000Z46.80881.9776002016-12-13T05:00:00.0000Zxxxx1yyyy140100024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor2016-12-13T05:00:00.0000Zxxxx2yyyy2401090024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor2016-12-13T05:00:00.0000Zxxxx0yyyy040100-9024478000000.02m/sCGfalse{"description":"Streckeisen STS-2 Standard-gain"}FDSNXML:Sensor2001-08-25T00:00:00.0000Z(GSN) IRIS/USGS (IU) and Geoscience Australia-21.159119.7313190falsetrue2018-09-23T00:00:00.0000Z-21.159119.7313932018-09-23T00:00:00.0000Zxxxx1yyyy140197271019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-09-23T00:00:00.0000Zxxxx2yyyy2401971019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-09-23T00:00:00.0000Zxxxx0yyyy0401970-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB 
Seismometer"}FDSNXML:Sensor1999-10-23T00:00:00.0000Z(GSN) IRIS/USGS (IU) and NIED28.2156-177.369820falsetrue2013-07-28T03:00:00.0000Z28.2156-177.3698192013-07-28T03:00:00.0000Zxxxx1yyyy14011003.26284e+100.02m/sCGfalse{"description":"Streckeisen STS-2 High-gain"}FDSNXML:Sensor2013-07-28T03:00:00.0000Zxxxx2yyyy240119003.26284e+100.02m/sCGfalse{"description":"Streckeisen STS-2 High-gain"}FDSNXML:Sensor2013-07-28T03:00:00.0000Zxxxx0yyyy040110-903.26284e+100.02m/sCGfalse{"description":"Streckeisen STS-2 High-gain"}FDSNXML:Sensor1991-11-25T00:00:00.0000Z(GSN) IRIS/USGS (IU) and Geoscience Australia-32.9277117.239380falsetrue2018-09-15T00:00:00.0000Z-32.9277117.239278.12018-09-15T00:00:00.0000Zxxxx1yyyy140110272019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-09-15T00:00:00.0000Zxxxx2yyyy2401102162019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-09-15T00:00:00.0000Zxxxx0yyyy04011020-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2009-04-01T00:00:00.0000Z(GSN) IRIS/USGS (IU) and EPN0.2376-78.45083510falsetrue2015-12-10T20:00:00.0000Z0.2376-78.450834952015-12-10T20:00:00.0000Zxxxx1yyyy1201150045517100000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2015-12-13T00:40:00.0000Zxxxx0yyyy0201150-9041557600000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2021-04-15T00:00:00.0000Zxxxx2yyyy22011590044208400000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2009-08-14T00:00:00.0000Z(GSN) IRIS/USGS (IU) and IGN39.5446-4.3499950falsetrue2022-08-03T00:00:00.0000Z39.5446-4.34999482022-08-03T00:00:00.0000Zxxxx1yyyy1401219019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2022-08-03T00:00:00.0000Zxxxx2yyyy24012109019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2022-08-03T00:00:00.0000Zxxxx0yyyy040120-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor1998-06-19T00:00:00.0000Z(GSN) IRIS/USGS (IU), ECCD, and EPN-0.6742-90.2861270falsetrue2019-09-24T00:00:00.0000Z-0.6742-90.28611842019-09-24T00:00:00.0000Zxxxx1yyyy140186335019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2019-09-24T00:00:00.0000Zxxxx2yyyy24018665019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2019-09-24T00:00:00.0000Zxxxx0yyyy0401860-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor1993-08-28T00:00:00.0000Z(GSN) Global Seismograph Network (IRIS/USGS)53.0233158.6499110falsetrue2022-11-17T03:00:00.0000Z53.0233158.64991082022-11-17T03:00:00.0000Zxxxx1yyyy14012355033286100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor2022-11-17T03:00:00.0000Zxxxx2yyyy2401285033286100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor2022-11-17T03:00:00.0000Zxxxx0yyyy040120-9033286100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor1993-09-10T00:00:00.0000Z(GSN) IRIS/USGS (IU),GEOFON(GE)Poseidon,PM Geo Obs-9.4047147.159790falsetrue2011-07-29T18:28:00.0000Z-9.4047147.1597902011-07-29T18:28:00.0000Zxxxx1yyyy12010358032252500000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2011-07-29T18:28:00.0000Zxxxx2yyyy2201088032667400000.02m/sCGfalse{"description":"Streckeisen STS-1VBB 
w/E300"}FDSNXML:Sensor2011-07-29T18:28:00.0000Zxxxx0yyyy020100-9041226400000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor1993-03-03T00:00:00.0000Z(GSN) IRIS/USGS (IU) and NSF-OPP-64.7744-64.048940falsetrue2012-02-19T00:00:00.0000Z-64.7744-64.0489402012-02-19T00:00:00.0000Zxxxx1yyyy120100034779500000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2012-02-19T00:00:00.0000Zxxxx2yyyy2201090048084000000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2012-02-19T00:00:00.0000Zxxxx0yyyy020100-9041675000000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor1999-11-13T00:00:00.0000Z(GSN) IRIS/USGS (IU) and NIED19.7573-155.53261990falsetrue2018-10-25T00:00:00.0000Z19.7573-155.53261913.12018-10-25T00:00:00.0000Zxxxx1yyyy14017795019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-10-25T00:00:00.0000Zxxxx2yyyy240177185019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-10-25T00:00:00.0000Zxxxx0yyyy0401770-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor1996-12-29T00:00:00.0000Z(GSN) IRIS/USGS (IU), NIED, and British Geo. Surv.-25.0713-130.0953220falsetrue2014-11-30T00:00:00.0000Z-25.0713-130.09532182014-11-30T00:00:00.0000Zxxxx1yyyy140120024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2014-11-30T00:00:00.0000Zxxxx2yyyy2401290024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2014-11-30T00:00:00.0000Zxxxx0yyyy040120-9024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2002-12-29T00:00:00.0000Z(GSN) IRIS/USGS (IU) and NSF-OPP-89.9289144.43822850falsetrue2011-01-18T00:00:00.0000Z-89.9289144.438225802011-01-18T00:00:00.0000Zxxxx1yyyy1201270176031642200000.02m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2011-01-18T00:00:00.0000Zxxxx2yyyy2201270266033353400000.02m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2011-01-18T00:00:00.0000Zxxxx0yyyy02012700-9032330000000.02m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2004-07-18T00:00:00.0000Z(GSN) IRIS/USGS (IU), NIED, and GNS Science-29.245-177.92960falsetrue2013-05-17T21:00:00.0000Z-29.245-177.92959.52013-05-17T21:00:00.0000Zxxxx1yyyy14010.50024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2013-05-17T21:00:00.0000Zxxxx2yyyy24010.590024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2013-05-17T21:00:00.0000Zxxxx0yyyy04010.50-9024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor1992-03-07T00:00:00.0000Z(GSN) IRIS/USGS (IU) and GNS Science-21.2125-159.773328falsetrue2018-03-28T00:00:00.0000Z-21.2125-159.7733-732018-03-28T00:00:00.0000Zxxxx1yyyy1401101269019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-03-28T00:00:00.0000Zxxxx2yyyy2401101359019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-03-28T00:00:00.0000Zxxxx0yyyy04011010-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2009-07-29T00:00:00.0000Z(GSN) IRIS/USGS (IU) and UFRN-5.8274-35.9014400falsetrue2015-10-13T00:00:00.0000Z-5.8274-35.90142922015-10-13T00:00:00.0000Zxxxx1yyyy1201108264034192300000.02m/sCGfalse{"description":"Geotech KS-54000 Borehole 
Seismometer"}FDSNXML:Sensor2015-10-13T00:00:00.0000Zxxxx2yyyy2201108354034494300000.02m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2015-10-13T00:00:00.0000Zxxxx0yyyy02011080-9034242600000.02m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2009-09-09T00:00:00.0000Z(GSN) IRIS/USGS (IU) and ANSS44.1212-104.03592090falsetrue2022-07-21T20:00:00.0000Z44.1212-104.035920242022-07-21T20:00:00.0000Zxxxx1yyyy140166175019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2022-07-21T20:00:00.0000Zxxxx2yyyy240166265019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2022-07-21T20:00:00.0000Zxxxx0yyyy0401660-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2003-05-05T00:00:00.0000Z(GSN) IRIS/USGS (IU) and Univ. of Brasilia-8.9489-63.1831120falsetrue2017-02-14T15:30:00.0000Z-8.9489-63.183192017-02-14T15:30:00.0000Zxxxx1yyyy1201111140034142000000.02m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2017-02-14T15:30:00.0000Zxxxx2yyyy2201111230034595000000.02m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2017-02-14T15:30:00.0000Zxxxx0yyyy02011110-9032564900000.02m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor1998-10-28T00:00:00.0000Z(GSN) IRIS/USGS (IU), NSF-OPP, and GNS Science-77.8492166.757250falsetrue2014-12-04T00:00:00.0000Z-77.8492166.7572482014-12-04T00:00:00.0000Zxxxx1yyyy140120024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2014-12-04T00:00:00.0000Zxxxx2yyyy2401290024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2014-12-04T00:00:00.0000Zxxxx0yyyy040120-9024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor1994-08-19T00:00:00.0000Z(GSN) IRIS/USGS (IU) and FUNVISIS8.8839-70.6341620falsetrue2020-08-05T13:00:00.0000Z8.8839-70.63415892020-08-05T13:00:00.0000Zxxxx1yyyy140131003.26284e+100.02m/sCGfalse{"description":"Streckeisen STS-2 High-gain"}FDSNXML:Sensor2020-08-05T13:00:00.0000Zxxxx2yyyy2401319003.26284e+100.02m/sCGfalse{"description":"Streckeisen STS-2 High-gain"}FDSNXML:Sensor2020-08-05T13:00:00.0000Zxxxx0yyyy0401310-903.26284e+100.02m/sCGfalse{"description":"Streckeisen STS-2 High-gain"}FDSNXML:Sensor2005-01-31T16:00:00.0000Z(GSN) IRIS/USGS (IU), GEOFON (GE), and GEUS66.9961-50.62076330falsetrue2016-01-20T13:00:00.0000Z66.9961-50.620763302016-01-20T13:00:00.0000Zxxxx1yyyy120100033856800000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2016-01-20T13:00:00.0000Zxxxx2yyyy2201090046171400000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2016-01-20T13:00:00.0000Zxxxx0yyyy020100-9040651600000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2010-04-16T00:00:00.0000Z(GSN) IRIS/USGS (IU)18.1091-66.15420falsetrue2017-08-31T00:00:00.0000Z18.1091-66.154192017-08-31T00:00:00.0000Zxxxx1yyyy14011358018287200000.02m/sCGfalse{"description":"Trillium 360"}FDSNXML:Sensor2017-08-31T00:00:00.0000Zxxxx2yyyy2401188018287200000.02m/sCGfalse{"description":"Trillium 360"}FDSNXML:Sensor2017-08-31T00:00:00.0000Zxxxx0yyyy040110-9018287200000.02m/sCGfalse{"description":"Trillium 360"}FDSNXML:Sensor2008-12-15T00:00:00.0000Z(GSN) IRIS/USGS (IU) and 
CICESE23.6858-109.9443825falsetrue2022-02-09T16:15:00.0000Z23.6858-109.94438252022-02-09T16:15:00.0000Zxxxx1yyyy14010003.26284e+100.02m/sCGfalse{"description":"Streckeisen STS-2 High-gain"}FDSNXML:Sensor2022-02-09T16:15:00.0000Zxxxx2yyyy240109003.26284e+100.02m/sCGfalse{"description":"Streckeisen STS-2 High-gain"}FDSNXML:Sensor2022-02-09T16:15:00.0000Zxxxx0yyyy040100-903.26284e+100.02m/sCGfalse{"description":"Streckeisen STS-2 High-gain"}FDSNXML:Sensor1992-04-07T00:00:00.0000Z(GSN) IRIS/USGS (IU) and GNS Science-41.3087174.7043120falsetrue2018-03-21T00:00:00.0000Z-41.3087174.7043292018-03-21T00:00:00.0000Zxxxx1yyyy140191107019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-03-21T00:00:00.0000Zxxxx2yyyy240191197019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2018-03-21T00:00:00.0000Zxxxx0yyyy0401910-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2010-09-27T00:00:00.0000Z(GSN) IRIS/USGS (IU), Penn. State Univ., and ANSS40.6358-77.8876270falsetrue2017-09-23T17:30:00.0000Z40.6358-77.88761932017-09-23T17:30:00.0000Zxxxx1yyyy140177286019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2017-09-23T17:30:00.0000Zxxxx2yyyy24017716019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2017-09-23T17:30:00.0000Zxxxx0yyyy0401770-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2009-11-13T00:10:27.0000Z(GSN) IRIS/USGS (IU),NIED, Kiribati Weather Serv1.3549172.922920falsetrue2019-08-21T22:00:00.0000Z1.3549172.9229182019-08-21T22:00:00.0000Zxxxx1yyyy140120024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2019-08-21T22:00:00.0000Zxxxx2yyyy2401290024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2019-08-21T22:00:00.0000Zxxxx0yyyy040120-9024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2009-09-22T13:56:41.0000Z(GSN) IRIS/USGS (IU) and Academia Sinica24.9735121.4971160falsetrue2009-09-22T13:56:41.0000Z24.9735121.497177.12009-09-22T13:56:41.0000Zxxxx1yyyy120182.9330034922500000.02m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2009-09-22T13:56:41.0000Zxxxx2yyyy220182.960034405600000.02m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor2009-09-22T13:56:41.0000Zxxxx0yyyy020182.90-9033627900000.02m/sCGfalse{"description":"Geotech KS-54000 Borehole Seismometer"}FDSNXML:Sensor1996-11-01T00:00:00.0000Z(GSN) IRIS/USGS (IU) and UNAM20.2263-88.276340falsetrue2014-10-30T00:00:00.0000Z20.2263-88.2763402014-10-30T00:00:00.0000Zxxxx1yyyy140100024478100000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2014-10-30T00:00:00.0000Zxxxx2yyyy2401090024478100000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2014-10-30T00:00:00.0000Zxxxx0yyyy040100-9024478100000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor1995-08-15T00:00:00.0000Z(GSN) IRIS/USGS (IU) and GSRAS71.6341128.866740falsetrue2014-09-21T00:00:00.0000Z71.6341128.8667402014-09-21T00:00:00.0000Zxxxx1yyyy120100036492900000.05m/sCGfalse{"description":"Streckeisen STS-1/VBB Seismometer"}FDSNXML:Sensor2014-09-21T00:00:00.0000Zxxxx2yyyy2201090035821800000.05m/sCGfalse{"description":"Streckeisen STS-1/VBB Seismometer"}FDSNXML:Sensor2019-07-19T00:00:00.0000Zxxxx0yyyy0201009040234500000.05m/sCGfalse{"description":"Streckeisen STS-1/VBB 
Seismometer"}FDSNXML:Sensor2004-03-03T00:00:00.0000Z(GSN) IRIS/USGS (IU), GEOSCOPE (G), and BGS-37.0681-12.315260falsetrue2016-03-18T00:00:00.0000Z-37.0681-12.3152582016-03-18T00:00:00.0000Zxxxx1yyyy140120024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2016-03-18T00:00:00.0000Zxxxx2yyyy2401290024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2016-03-18T00:00:00.0000Zxxxx0yyyy040120-9024478000000.02m/sCGfalse{"description":"Streckeisen STS-2.5"}FDSNXML:Sensor2009-05-19T20:00:00.0000Z(GSN) IRIS/USGS (IU), Univ. of La Plata, INPRES-38.0568-61.9787540falsetrue2022-03-17T18:22:00.0000Z-38.0568-61.97875082022-03-17T18:22:00.0000Zxxxx1yyyy140132107019746800000.02m/sCGfalse{"description":"T120 post hole, quiet"}FDSNXML:Sensor2022-03-17T18:22:00.0000Zxxxx2yyyy240132197019746800000.02m/sCGfalse{"description":"T120 post hole, quiet"}FDSNXML:Sensor2022-03-17T18:22:00.0000Zxxxx0yyyy0401320-9019746800000.02m/sCGfalse{"description":"T120 post hole, quiet"}FDSNXML:Sensor1994-08-19T00:00:00.0000Z(GSN) IRIS/USGS (IU) and Geological Survey Namibia-19.202217.58381260falsetrue2015-09-03T12:00:00.0000Z-19.202217.583812602015-09-03T12:00:00.0000Zxxxx1yyyy120100032816600000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2015-09-03T12:00:00.0000Zxxxx2yyyy2201090032866900000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2015-09-03T12:00:00.0000Zxxxx0yyyy020100-9041237100000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2009-05-28T19:40:00.0000Z(GSN) IRIS/USGS (IU), Univ. of Arizona, and ANSS32.3098-110.7847910falsetrue2021-09-24T00:00:00.0000Z32.3098-110.78479092021-09-24T00:00:00.0000Zxxxx1yyyy140110019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2021-09-24T00:00:00.0000Zxxxx2yyyy2401190019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2021-09-24T00:00:00.0000Zxxxx0yyyy040110-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor1994-10-31T00:00:00.0000Z(GSN) IRIS/USGS (IU) and Mongolia Acad. 
of Science47.8651107.05321610falsetrue2013-09-29T00:00:00.0000Z47.8651107.053216102013-09-29T00:00:00.0000Zxxxx1yyyy120100033957500000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2013-09-29T00:00:00.0000Zxxxx2yyyy2201090033403800000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2013-09-29T00:00:00.0000Zxxxx0yyyy020100-9041708600000.02m/sCGfalse{"description":"Streckeisen STS-1VBB w/E300"}FDSNXML:Sensor2007-11-15T00:00:00.0000Z(GSN) IRIS/USGS (IU)19.2834166.65220falsetrue2019-08-08T20:10:00.0000Z19.2834166.652-532019-08-08T20:10:00.0000Zxxxx1yyyy14017357018287200000.02m/sCGfalse{"description":"Trillium 360"}FDSNXML:Sensor2019-08-08T20:10:00.0000Zxxxx2yyyy240173147018287200000.02m/sCGfalse{"description":"Trillium 360"}FDSNXML:Sensor2019-08-08T20:10:00.0000Zxxxx0yyyy0401730-9018287200000.02m/sCGfalse{"description":"Trillium 360"}FDSNXML:Sensor1997-07-17T00:00:00.0000Z(GSN) IRIS/USGS (IU), St Louis Univ., and ANSS38.2289-86.2939210falsetrue2021-05-18T19:00:00.0000Z38.2289-86.2939782021-05-18T19:00:00.0000Zxxxx1yyyy14011320033218900000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor2021-05-18T19:00:00.0000Zxxxx2yyyy240113290033353100000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor2021-05-18T19:00:00.0000Zxxxx0yyyy04011320-9033286000000.02m/sCGfalse{"description":"Trillium T360 Surface Seismometer"}FDSNXML:Sensor1997-04-16T00:00:00.0000Z(GSN) IRIS/USGS (IU), St Louis Univ., and ANSS36.1297-87.83170falsetrue2017-11-16T00:00:00.0000Z36.1297-87.83702017-11-16T00:00:00.0000Zxxxx1yyyy1401100304019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2017-11-16T00:00:00.0000Zxxxx2yyyy240110034019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor2017-11-16T00:00:00.0000Zxxxx0yyyy04011000-9019847500000.02m/sCGfalse{"description":"Streckeisen STS-6A VBB Seismometer"}FDSNXML:Sensor1997-08-22T00:00:00.0000Z(GSN) IRIS/USGS (IU), NIED, Kiribati Weather Serv2.0448-157.445720falsetrue2018-07-06T20:00:00.0000Z2.0448-157.4457192018-07-06T20:00:00.0000Zxxxx1yyyy14011316018287200000.02m/sCGfalse{"description":"Trillium 360"}FDSNXML:Sensor2018-07-06T20:00:00.0000Zxxxx2yyyy2401146018287200000.02m/sCGfalse{"description":"Trillium 360"}FDSNXML:Sensor2018-07-06T20:00:00.0000Zxxxx0yyyy040110-9018287200000.02m/sCGfalse{"description":"Trillium 360"}FDSNXML:Sensor1993-08-31T00:00:00.0000Z(GSN) IRIS/USGS (IU) and GSRAS62.031129.6805110falsetrue2013-05-28T00:00:00.0000Z62.031129.6805962013-05-28T00:00:00.0000Zxxxx0yyyy0201140-9041056700000.05m/sCGfalse{"description":"Streckeisen STS-1/VBB Seismometer"}FDSNXML:Sensor2020-01-01T00:00:00.0000Zxxxx1yyyy1201140032447500000.02m/sCGfalse{"description":"Streckeisen STS-1/VBB Seismometer"}FDSNXML:Sensor2020-01-01T00:00:00.0000Zxxxx2yyyy22011490031659000000.02m/sCGfalse{"description":"Streckeisen STS-1/VBB Seismometer"}FDSNXML:Sensor1992-05-31T00:00:00.0000Z(GSN) IRIS/USGS (IU), Pacific 21, and GSRAS46.9587142.7604150falsetrue2013-11-20T04:30:00.0000Z46.9587142.76041482013-11-20T04:30:00.0000Zxxxx0yyyy020120-9043671500000.02m/sCGfalse{"description":"Streckeisen STS-1/VBB Seismometer"}FDSNXML:Sensor2018-01-01T00:00:00.0000Zxxxx2yyyy2201290030518000000.02m/sCGfalse{"description":"Streckeisen STS-1/VBB Seismometer"}FDSNXML:Sensor2019-09-11T04:00:00.0000Zxxxx1yyyy120120035769300000.02m/sCGfalse{"description":"Streckeisen STS-1/VBB Seismometer"}FDSNXML:Sensor diff --git a/etc/inventory/inventory_TX.xml 
b/etc/inventory/inventory_TX.xml new file mode 100644 index 0000000..045fd88 --- /dev/null +++ b/etc/inventory/inventory_TX.xml @@ -0,0 +1,2 @@ + +[inventory_TX.xml; markup not preserved: a single data line describing TX-network sensors and dataloggers (Nanometrics Trillium 120 Posthole and Trillium Compact 20s paired with Centaur digitizers, Guralp CMG-40T/CMG-6TD/CMG-3TD with Minimus digitizers, Silicon Audio sensors), velocity units in m/s, and datalogger decimation chains referencing ResponsePAZ and ResponseFIR stages timestamped 20230413103931.]
ResponseFIR/20230413103931.503683.1495 ResponseFIR/20230413103931.503832.149610ResponseFIR/20230413103931.503302.1492 ResponseFIR/20230413103931.533471.1707 ResponseFIR/20230413103931.503553.1494 ResponseFIR/20230413103931.503683.1495 ResponseFIR/20230413103931.503832.14964000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 
ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 
ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.341307.88 ResponseFIR/20230413103931.341365.89 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.124000000ResponsePAZ/20230413103931.334246.9ResponseFIR/20230413103931.33437.10 ResponseFIR/20230413103931.33443.11 ResponseFIR/20230413103931.334477.12A1202.518.31871e+171611(0,0) (0,0) (-31.63,0) (-160,0) (-350,0) (-3177,0)(-0.036614,0.037059) (-0.036614,-0.037059) (-32.55,0) (-142,0) (-364,404) (-364,-404) (-1260,0) (-4900,5200) (-4900,-5200) (-7100,1700) (-7100,-1700)A0.99254211100A753.114.34491e+171611(0,0) (0,0) (-392,0) (-1960,0) (-1490,1740) (-1490,-1740)(-0.2214,0.2221) (-0.2214,-0.2221) (-343,0) (-370,467) (-370,-467) (-836,1522) (-836,-1522) (-4900,4700) (-4900,-4700) (-6900,0) (-15000,0)A7981571508000125(0,0) (0,0)(-1005.31,0) (-502.655,0) (-1130.97,0) (-0.0740159,-0.0740159) (-0.0740159,0.0740159)A7961571508000125(0,0) (0,0)(-1005.31,0) (-502.655,0) (-1130.97,0) (-0.0740159,-0.0740159) (-0.0740159,0.0740159)A8001571508000125(0,0) (0,0)(-1005.31,0) (-502.655,0) (-1130.97,0) (-0.0740159,-0.0740159) (-0.0740159,0.0740159)A754.314.34493e+171611(0,0) (0,0) (-392,0) (-1960,0) (-1490,1740) (-1490,-1740)(-0.03691,0.03702) (-0.03691,-0.03702) (-343,0) (-370,467) (-370,-467) (-836,1522) (-836,-1522) (-4900,4700) (-4900,-4700) (-6900,0) (-15000,0)A111100A59400.05571508000125(0,0) (0,0)(-0.037008,0.037008) (-0.037008,-0.037008) (-502.65,0) (-1005,0) (-1131,0)A80019.09214e+11126(0,0) (0,0)(-0.07402,0.07402) (-0.07402,-0.07402) (-392,850.7) (-392,-850.7) (-2199,0) (-471.2,0)A8001571571000125(0,0) (0,0)(-1005.31,0) (-502.655,0) (-1130.97,0) (-0.0740159,-0.0740159) (-0.0740159,0.0740159)A7961571571000125(0,0) (0,0)(-1005.31,0) (-502.655,0) (-1130.97,0) (-0.0740159,-0.0740159) (-0.0740159,0.0740159)A240019.0926e+11126(0,0) (0,0)(-0.148,0.148) (-0.148,-0.148) (-391.96,850.69) (-391.96,-850.69) (-471.24,0) (-2199.1,0)A7941571508000125(0,0) (0,0)(-0.0740159,-0.0740159) (-0.0740159,0.0740159) (-1005.31,0) (-502.655,0) 
(-1130.97,0)A7961571508000125(0,0) (0,0)(-0.0740159,-0.0740159) (-0.0740159,0.0740159) (-1005.31,0) (-502.655,0) (-1130.97,0)A7981571508000125(0,0) (0,0)(-0.0740159,-0.0740159) (-0.0740159,0.0740159) (-1005.31,0) (-502.655,0) (-1130.97,0)A1202.518.31871e+171611(0,0) (0,0) (-31.63,0) (-160,0) (-350,0) (-3177,0)(-0.036614,0.037059) (-0.036614,-0.037059) (-32.55,0) (-142,0) (-364,404) (-364,-404) (-1260,0) (-4900,5204) (-4900,-5204) (-7100,1700) (-7100,-1700)A75011636980145(0,0) (0,0) (-566.114,0) (-4398.22,0)(-0.150247,0) (-0.0144008,0) (-7539.82,0) (-3487.82,22985) (-3487.82,-22985)111586.00186.00187B-4.36251e-10 -1.80638e-09 -5.57039e-09 -1.43857e-08 -3.29564e-08 -6.9106e-08 -1.35197e-07 -2.49921e-07 -4.40432e-07 -7.44758e-07 -1.21435e-06 -1.91648e-06 -2.93628e-06 -4.37772e-06 -6.36325e-06 -9.03122e-06 -1.25305e-05 -1.70116e-05 -2.26134e-05 -2.94454e-05 -3.75653e-05 -4.69514e-05 -5.7471e-05 -6.88472e-05 -8.06233e-05 -9.21305e-05 -0.00010246 -0.000110448 -0.000114666 -0.000113442 -0.000104896 -8.70058e-05 -5.77047e-05 -1.50087e-05 4.2822e-05 0.000117095 0.000208472 0.000316746 0.000440611 0.00057745 0.000723153 0.000871981 0.00101651 0.00114768 0.00125488 0.00132628 0.0013492 0.00131066 0.00119801 0.000999781 0.000706519 0.000311744 -0.00018709 -0.000787669 -0.00148199 -0.00225565 -0.00308741 -0.00394894 -0.00480495 -0.00561366 -0.0063276 -0.00689485 -0.00726063 -0.0073692 -0.00716606 -0.00660035 -0.00562732 -0.00421081 -0.0023256 4.04992e-05 0.00288491 0.00618906 0.00991772 0.014019 0.0184249 0.023053 0.0278081 0.0325852 0.0372727 0.0417557 0.0459205 0.0496579 0.0528678 0.0554623 0.0573694 0.0585357 0.0589281115474748B6.15343e-09 3.53052e-08 1.08986e-07 2.14286e-07 2.2567e-07 -1.68219e-07 -1.41445e-06 -3.77091e-06 -6.66079e-06 -7.96281e-06 -3.89963e-06 9.60392e-06 3.36284e-05 6.18314e-05 7.74814e-05 5.58158e-05 -2.52052e-05 -0.000166561 -0.000329415 -0.000428927 -0.000354311 -2.02975e-05 0.000562296 0.00122953 0.00166455 0.00148856 0.000439482 -0.00141854 -0.00355801 -0.00503543 -0.0047839 -0.00210515 0.00281102 0.00859469 0.0128681 0.0129735 0.00711022 -0.0045056 -0.0189689 -0.0309348 -0.0339263 -0.0224213 0.00594329 0.0488783 0.0993351 0.147012 0.181112 0.193488112111111112B-2.4877e-10 4.73744e-09 1.24032e-08 2.18423e-09 -2.9735e-08 -2.7741e-08 4.8235e-08 9.04852e-08 -4.3772e-08 -2.02925e-07 -2.93251e-08 3.57677e-07 2.38076e-07 -5.05054e-07 -6.56661e-07 5.29514e-07 1.33303e-06 -2.35391e-07 -2.23456e-06 -6.43189e-07 3.17885e-06 2.39189e-06 -3.76643e-06 -5.21313e-06 3.34388e-06 9.06984e-06 -1.0342e-06 -1.34948e-05 -4.1282e-06 1.74081e-05 1.29325e-05 -1.90112e-05 -2.55982e-05 1.58385e-05 4.12985e-05 -5.05136e-06 -5.76943e-05 -1.59734e-05 7.06232e-05 4.87621e-05 -7.41183e-05 -9.2656e-05 6.09281e-05 0.000143704 -2.36571e-05 -0.000193818 -4.34607e-05 0.000230529 0.0001423 -0.000237697 -0.00026842 0.00019742 0.000408986 -9.32264e-05 -0.000541667 -8.56506e-05 0.000635146 0.000339424 -0.000651794 -0.000653162 0.000552734 0.000993533 -0.000305133 -0.00130802 -0.000108987 0.00152764 0.000683646 -0.00157381 -0.00138119 0.00136932 0.00212784 -0.000852739 -0.00281433 -5.43598e-06 0.00330293 0.00118724 -0.00344149 -0.00261502 0.00308386 0.00414554 -0.00211493 -0.00557267 0.000477543 0.00664017 0.00180254 -0.00706464 -0.00459738 0.00656694 0.0076669 -0.00490895 -0.0106603 0.00193106 0.0131289 0.00241487 -0.0145476 -0.00804248 0.0143373 0.0147221 -0.0118736 -0.0220889 0.00645106 0.0296694 0.00287172 -0.0369222 -0.0177763 0.0432915 0.0424836 -0.0482648 -0.0928087 0.0514286 0.313777 
0.447486111581.99981.99983B-4.04791e-10 -1.39029e-10 6.728e-10 2.75797e-09 7.54651e-09 1.76668e-08 3.76936e-08 7.52313e-08 1.42425e-07 2.57999e-07 4.49904e-07 7.58655e-07 1.24139e-06 1.97658e-06 3.06939e-06 4.65728e-06 6.91569e-06 1.00631e-05 1.43649e-05 2.0135e-05 2.77346e-05 3.75661e-05 5.00617e-05 6.56652e-05 8.48049e-05 0.000107859 0.000135108 0.000166685 0.000202505 0.0002422 0.000285038 0.00032985 0.000374956 0.000418099 0.0004564 0.000486339 0.000503757 0.000503913 0.00048158 0.000431195 0.000347068 0.000223655 5.58818e-05 -0.000160476 -0.000428365 -0.000749008 -0.00112143 -0.00154201 -0.00200406 -0.00249746 -0.00300841 -0.00351925 -0.00400852 -0.00445107 -0.00481847 -0.00507953 -0.00520107 -0.00514889 -0.00488884 -0.00438813 -0.00361664 -0.0025484 -0.00116293 0.00055335 0.00260595 0.00499129 0.00769592 0.0106961 0.0139576 0.0174363 0.0210783 0.0248217 0.0285976 0.0323318 0.0359474 0.0393664 0.0425127 0.0453141 0.0477047 0.0496275 0.0510356 0.0518947 0.05218341110939394B8.46923e-10 2.27422e-09 4.53895e-09 6.87379e-09 7.10904e-09 7.62784e-10 -1.98301e-08 -6.60994e-08 -1.53114e-07 -2.98249e-07 -5.18152e-07 -8.23636e-07 -1.21248e-06 -1.66055e-06 -2.11247e-06 -2.47353e-06 -2.60562e-06 -2.33013e-06 -1.44098e-06 2.69804e-07 2.97271e-06 6.74886e-06 1.15296e-05 1.70354e-05 2.27248e-05 2.7767e-05 3.10537e-05 3.1263e-05 2.69861e-05 1.69189e-05 1.11176e-07 -2.37456e-05 -5.40267e-05 -8.89227e-05 -0.000125282 -0.000158604 -0.000183236 -0.000192798 -0.000180864 -0.00014185 -7.2067e-05 2.91938e-05 0.000158665 0.000308386 0.000465422 0.000612198 0.000727547 0.000788512 0.000772858 0.000662123 0.000444926 0.000120161 -0.000300369 -0.000790361 -0.00130887 -0.00180202 -0.00220662 -0.00245579 -0.00248619 -0.00224631 -0.00170503 -0.000859463 0.000259011 0.00158157 0.0030025 0.00438414 0.00556632 0.00638 0.0066644 0.00628604 0.00515814 0.0032582 0.000641682 -0.00255018 -0.00609006 -0.0096727 -0.0129317 -0.0154642 -0.0168626 -0.016749 -0.0148125 -0.0108426 -0.00475778 0.00337489 0.0133318 0.0247374 0.0370816 0.0497507 0.062067 0.0733368 0.0829015 0.0901866 0.0947464 0.096298510.005256383.5008383.5008383C5.96091e-08 1.78827e-07 3.57655e-07 5.96091e-07 8.94137e-07 1.25179e-06 1.66906e-06 2.14593e-06 2.68241e-06 3.2785e-06 3.9342e-06 4.64951e-06 5.42443e-06 6.25896e-06 7.15309e-06 8.10684e-06 9.1202e-06 1.01932e-05 1.13257e-05 1.25179e-05 1.37697e-05 1.50811e-05 1.64521e-05 1.78827e-05 1.9373e-05 2.09228e-05 2.25322e-05 2.42013e-05 2.593e-05 2.77182e-05 2.95661e-05 3.14736e-05 3.34407e-05 3.54674e-05 3.75537e-05 3.96997e-05 4.19052e-05 4.41704e-05 4.64951e-05 4.88795e-05 5.13235e-05 5.3827e-05 5.63902e-05 5.9013e-05 6.16954e-05 6.44375e-05 6.72391e-05 7.01003e-05 7.30212e-05 7.60016e-05 7.90417e-05 8.21414e-05 8.53007e-05 8.85195e-05 9.1798e-05 9.51362e-05 9.85339e-05 0.000101991 0.000105508 0.000109085 0.000112721 0.000116417 0.000120172 0.000123987 0.000127862 0.000131796 0.00013579 0.000139843 0.000143956 0.000148129 0.000152361 0.000156653 0.000161004 0.000165415 0.000169886 0.000174416 0.000179006 0.000183656 0.000188365 0.000193134 0.000197962 0.00020285 0.000207797 0.000212805 0.000217871 0.000222998 0.000228184 0.000233429 0.000238735 0.000244099 0.000249524 0.000255008 0.000260551 0.000266155 0.000271818 0.00027754 0.000283322 0.000289164 0.000295065 0.000301026 0.000307047 0.000313127 0.000319266 0.000325466 0.000331725 0.000338043 0.000344422 0.000350859 0.000357357 0.000363914 0.00037053 0.000377207 0.000383942 0.000390738 0.000397593 0.000404508 0.000411482 0.000418516 0.000425609 0.000432762 
0.000439975 0.000447247 0.000454579 0.000461971 0.000469422 0.000476933 0.000484503 0.000492133 0.000499822 0.000507572 0.00051538 0.000523249 0.000531177 0.000539165 0.000547212 0.000555319 0.000563485 0.000571711 0.000579997 0.000588342 0.000596747 0.000605211 0.000613736 0.000622319 0.000630963 0.000639665 0.000648428 0.00065725 0.000666132 0.000675073 0.000684074 0.000693135 0.000702255 0.000711435 0.000720674 0.000729973 0.000739332 0.00074875 0.000758228 0.000767766 0.000777363 0.000787019 0.000796736 0.000806511 0.000816347 0.000826242 0.000836197 0.000846211 0.000856285 0.000866419 0.000876612 0.000886865 0.000897177 0.000907549 0.00091798 0.000928472 0.000939023 0.000949633 0.000960303 0.000971033 0.000981822 0.000992671 0.00100358 0.00101455 0.00102557 0.00103666 0.00104781 0.00105902 0.00107028 0.00108161 0.00109299 0.00110444 0.00111594 0.00112751 0.00113913 0.00115081 0.00116256 0.00117436 0.00118622 0.00119814 0.00121012 0.00122217 0.00123427 0.00124643 0.00125865 0.00127093 0.00128327 0.00129566 0.00130812 0.00132064 0.00133322 0.00134585 0.00135855 0.00137131 0.00138412 0.001397 0.00140993 0.00142293 0.00143598 0.0014491 0.00146227 0.0014755 0.0014888 0.00150215 0.00151556 0.00152903 0.00154256 0.00155616 0.00156981 0.00158352 0.00159729 0.00161112 0.001625 0.00163895 0.00165296 0.00166703 0.00168116 0.00169534 0.00170959 0.0017239 0.00173826 0.00175269 0.00176717 0.00178172 0.00179632 0.00181098 0.00182571 0.00184049 0.00185533 0.00187024 0.0018852 0.00190022 0.0019153 0.00193044 0.00194564 0.0019609 0.00197604 0.00199106 0.00200597 0.00202075 0.00203541 0.00204996 0.00206438 0.00207869 0.00209288 0.00210694 0.00212089 0.00213472 0.00214843 0.00216202 0.00217549 0.00218885 0.00220208 0.00221519 0.00222819 0.00224106 0.00225382 0.00226646 0.00227898 0.00229137 0.00230365 0.00231581 0.00232786 0.00233978 0.00235158 0.00236326 0.00237483 0.00238627 0.0023976 0.0024088 0.00241989 0.00243086 0.00244171 0.00245244 0.00246305 0.00247354 0.00248391 0.00249416 0.0025043 0.00251431 0.00252421 0.00253398 0.00254364 0.00255318 0.0025626 0.0025719 0.00258108 0.00259014 0.00259908 0.0026079 0.0026166 0.00262519 0.00263365 0.002642 0.00265022 0.00265833 0.00266632 0.00267418 0.00268193 0.00268956 0.00269707 0.00270447 0.00271174 0.00271889 0.00272593 0.00273284 0.00273964 0.00274631 0.00275287 0.00275931 0.00276562 0.00277182 0.0027779 0.00278387 0.00278971 0.00279543 0.00280103 0.00280652 0.00281188 0.00281713 0.00282225 0.00282726 0.00283215 0.00283692 0.00284157 0.0028461 0.00285051 0.0028548 0.00285897 0.00286303 0.00286696 0.00287078 0.00287447 0.00287805 0.00288151 0.00288484 0.00288806 0.00289116 0.00289414 0.002897 0.00289975 0.00290237 0.00290487 0.00290726 0.00290952 0.00291167 0.00291369 0.0029156 0.00291739 0.00291906 0.00292061 0.00292204 0.00292335 0.00292454 0.00292562 0.00292657 0.0029274 0.00292812 0.00292872 0.00292919 0.00292955 0.00292979 0.0029299110.0055250250251B-1.3914904e-09 -7.5868625e-11 1.4200877e-09 2.8499027e-09 3.8190777e-09 3.8215773e-09 2.4064207e-09 -5.6244574e-10 -4.6551592e-09 -8.7699674e-09 -1.1288737e-08 -1.053179e-08 -5.4296423e-09 3.79412e-09 1.5210828e-08 2.5311517e-08 2.9804952e-08 2.5000226e-08 9.4252864e-09 -1.4870108e-08 -4.1928271e-08 -6.284793e-08 -6.8149231e-08 -5.107519e-08 -1.0878236e-08 4.5127627e-08 1.0185655e-07 1.3963625e-07 1.3993438e-07 9.2249317e-08 0 -1.1695791e-07 -2.2496485e-07 -2.8472615e-07 -2.6353065e-07 -1.4822868e-07 4.5393434e-08 2.7098925e-07 4.6019529e-07 5.4077426e-07 4.6031842e-07 2.0862088e-07 -1.6927223e-07 
-5.758821e-07 -8.8252986e-07 -9.6576255e-07 -7.4986984e-07 -2.4238295e-07 4.5091378e-07 1.1407705e-06 1.6001965e-06 1.6320524e-06 1.1402636e-06 1.8158658e-07 -1.0231328e-06 -2.1297997e-06 -2.7604263e-06 -2.6205924e-06 -1.6114345e-06 1.0180872e-07 2.0952424e-06 3.777385e-06 4.5513356e-06 4.0075255e-06 2.0893653e-06 -8.2703715e-07 -3.9798909e-06 -6.4017727e-06 -7.1966346e-06 -5.8400436e-06 -2.4091638e-06 2.3434889e-06 7.1217243e-06 1.0413305e-05 1.0939091e-05 8.0982523e-06 2.2657488e-06 -5.176319e-06 -1.2124364e-05 -1.6312601e-05 -1.6008176e-05 -1.06402e-05 -1.1520679e-06 1.0075805e-05 1.9770586e-05 2.4672866e-05 2.2567268e-05 1.3128197e-05 -1.7134926e-06 -1.8065588e-05 -3.1029033e-05 -3.6099904e-05 -3.0636361e-05 -1.4936201e-05 7.4666179e-06 3.0482833e-05 4.7039498e-05 5.1163443e-05 3.9987597e-05 1.5040358e-05 -1.7677054e-05 -4.9001375e-05 -6.9068141e-05 -7.0294213e-05 -5.0013104e-05 -1.1897642e-05 3.4420018e-05 7.5627429e-05 9.8424164e-05 9.3643072e-05 5.9567625e-05 3.3207122e-06 -6.0327667e-05 -0.00011265679 -0.00013633071 -0.00012090131 -6.6791901e-05 1.363986e-05 9.8607708e-05 0.00016258294 0.00018374526 0.00015108493 6.8926494e-05 -4.2792381e-05 -0.00015301586 -0.00022794736 -0.00024112837 -0.00018228976 -6.2128941e-05 8.8864959e-05 0.00022777007 0.00031112683 0.00030816397 0.00021142843 4.1309375e-05 -0.00015751612 -0.0003273975 -0.00041405684 -0.00038343927 -0.00023396306 -5.8024926e-19 0.00025528342 0.00045651008 0.00053789624 0.0004640963 0.00024364896 -6.9728572e-05 -0.00038946376 -0.00061951126 -0.00068264328 -0.00054546965 -0.00023230248 0.00017729712 0.00056792735 0.00082024437 0.0008467183 0.00062072407 0.00018959991 -0.00033359735 -0.00079887886 -0.0010616015 -0.00102653 -0.00068050007 -0.0001029013 0.000550941 0.001090594 0.0013451195 0.0012160399 0.00071256147 -4.2928781e-05 -0.00084302285 -0.0014511786 -0.0016705973 -0.0014063299 -0.00070141369 0.00026576715 0.0012249775 0.0018884223 0.0020357704 0.0015851559 0.00062781635 -0.00058666678 -0.001713666 -0.0024098539 -0.002436082 -0.0017364286 -0.00046803468 0.0010307021 0.002328423 0.0030231657 0.0028645851 0.0018394867 0.00019253014 -0.0016286287 -0.003092666 -0.0037372778 -0.0033120009 -0.0018678761 0.00023649145 0.0024201235 0.0040371188 0.0045645396 0.0037669524 0.0017870414 -0.00086985135 -0.0034601463 -0.0052061666 -0.0055250624 -0.0042163713 -0.0015496406 0.001781046 0.0048318116 0.00667071 0.006655423 0.0046460659 0.0010854441 -0.0030868795 -0.0066739995 -0.0085558716 -0.0080273973 -0.0050414194 -0.00027781321 0.00499557 0.0092464772 0.011107252 0.0097932448 0.0053881721 -0.0010978588 -0.0079346674 -0.013107238 -0.014876263 -0.012315809 -0.0056732344 0.0035677012 0.012967969 0.01971571 0.021381119 0.016656969 0.005885467 -0.0087886052 -0.023714487 -0.034448203 -0.036739736 -0.027573917 -0.0060163674 0.02635173 0.065538791 0.10579396 0.14068136 0.16435091 0.1727272740000011001B110.005256383.5008383.5008383C5.960909988e-08 1.788269941e-07 3.576550114e-07 5.96091013e-07 8.941369742e-07 1.251789968e-06 1.669059998e-06 2.145930011e-06 2.682410013e-06 3.278499889e-06 3.934199867e-06 4.649510174e-06 5.424430128e-06 6.258960184e-06 7.153089882e-06 8.106840141e-06 9.120200048e-06 1.019320007e-05 1.132570014e-05 1.251790036e-05 1.376969976e-05 1.508110017e-05 1.645210068e-05 1.788269947e-05 1.93729993e-05 2.09228001e-05 2.253220009e-05 2.420129931e-05 2.592999954e-05 2.771820073e-05 2.956609933e-05 3.147359894e-05 3.344070137e-05 3.546740118e-05 3.755369835e-05 3.969969839e-05 4.190520121e-05 4.417039963e-05 4.649510083e-05 
4.887950126e-05 5.132349906e-05 5.382699965e-05 5.639019946e-05 5.901300028e-05 6.169539847e-05 6.443750317e-05 6.723910337e-05 7.010030095e-05 7.302119775e-05 7.600159734e-05 7.904170343e-05 8.214139962e-05 8.530070045e-05 8.851949679e-05 9.179799963e-05 9.513620171e-05 9.853389929e-05 0.0001019909978 0.0001055079993 0.0001090849983 0.000112720998 0.0001164170026 0.0001201720006 0.0001239870035 0.0001278620039 0.0001317959977 0.0001357900037 0.0001398430031 0.0001439560001 0.0001481289946 0.0001523609972 0.0001566529972 0.0001610040053 0.0001654149964 0.0001698859996 0.0001744159963 0.0001790060051 0.0001836559968 0.0001883649966 0.0001931339939 0.0001979619992 0.0002028500021 0.0002077969984 0.0002128049964 0.0002178709983 0.0002229980018 0.0002281839988 0.0002334290039 0.0002387349959 0.0002440990065 0.0002495239896 0.0002550079953 0.0002605509944 0.0002661549952 0.0002718179894 0.0002775400062 0.000283322006 0.0002891639888 0.0002950649941 0.0003010260116 0.000307047012 0.0003131270059 0.0003192659933 0.0003254660114 0.0003317249939 0.0003380429989 0.0003444220056 0.0003508589871 0.0003573569993 0.000363914005 0.0003705300041 0.0003772070049 0.0003839420096 0.0003907379869 0.0003975929867 0.0004045079986 0.000411482004 0.0004185159924 0.0004256090033 0.0004327619972 0.0004399750032 0.0004472470027 0.0004545790143 0.0004619710089 0.000469421997 0.0004769329971 0.0004845029907 0.0004921330255 0.0004998220247 0.0005075720255 0.0005153800012 0.0005232489784 0.0005311769783 0.0005391649902 0.0005472120247 0.0005553190131 0.000563485024 0.0005717109889 0.000579997024 0.0005883420235 0.000596746977 0.0006052110111 0.0006137359887 0.0006223189994 0.0006309630116 0.0006396649987 0.0006484279875 0.0006572499988 0.0006661320222 0.0006750730099 0.0006840740098 0.0006931350217 0.000702254998 0.0007114349864 0.0007206739974 0.0007299730205 0.0007393319975 0.000748749997 0.0007582280086 0.0007677659742 0.0007773630205 0.0007870189729 0.0007967359852 0.0008065109723 0.0008163470193 0.0008262419724 0.0008361969958 0.0008462109836 0.0008562849835 0.0008664189954 0.0008766119718 0.0008868650184 0.0008971769712 0.0009075489943 0.0009179799818 0.000928472029 0.0009390229825 0.0009496330167 0.0009603030048 0.000971033005 0.0009818220278 0.0009926710045 0.001003580051 0.001014549984 0.001025569974 0.001036660047 0.001047809958 0.001059020055 0.001070279977 0.001081609982 0.001092990045 0.001104439958 0.001115940046 0.001127509982 0.001139129978 0.001150810043 0.001162559958 0.001174360048 0.001186219975 0.001198139973 0.001210120041 0.001222169958 0.00123427005 0.00124642998 0.00125864998 0.00127093005 0.001283269958 0.00129566004 0.001308119972 0.001320639974 0.001333220047 0.001345849945 0.001358550042 0.001371309976 0.001384119969 0.001397000044 0.001409929944 0.001422930043 0.001435979968 0.001449099975 0.00146227004 0.001475499943 0.001488800044 0.001502149971 0.001515559969 0.001529030036 0.001542560058 0.001556160045 0.001569809974 0.001583519974 0.001597290044 0.001611119951 0.001625000034 0.001638949965 0.001652959967 0.001667030039 0.001681159949 0.001695340034 0.001709589968 0.001723899972 0.001738260034 0.001752689946 0.001767170033 0.00178171997 0.001796319964 0.001810980029 0.001825709944 0.001840490033 0.00185532996 0.001870239968 0.001885200036 0.001900220057 0.001915300032 0.001930439961 0.00194563996 0.001960899914 0.001976039959 0.001991060097 0.002005970106 0.002020749962 0.002035409911 0.002049959963 0.002064380096 0.0020786901 0.002092879964 0.002106939908 0.002120889956 0.002134720096 
0.002148430096 0.002162019955 0.002175489906 0.002188849961 0.002202080097 0.002215190092 0.002228189958 0.002241059905 0.002253819956 0.002266460098 0.002278980101 0.00229136995 0.002303649904 0.00231580995 0.0023278601 0.002339780098 0.002351579955 0.002363259904 0.002374829957 0.002386270091 0.002397600096 0.002408799948 0.002419889905 0.002430859953 0.002441710094 0.002452440094 0.002463049954 0.002473539906 0.00248390995 0.002494160086 0.002504300093 0.002514309948 0.002524209907 0.002533979947 0.00254364009 0.002553180093 0.002562599955 0.00257189991 0.002581079956 0.002590140095 0.002599080093 0.002607899951 0.002616599901 0.002625189954 0.002633650089 0.002642000094 0.002650219947 0.002658329904 0.002666319953 0.002674180083 0.002681930084 0.002689559944 0.002697069896 0.002704469953 0.00271174009 0.002718890086 0.002725929953 0.002732839901 0.002739639953 0.002746310085 0.002752870088 0.002759309951 0.002765619894 0.002771819942 0.002777900081 0.002783870092 0.00278970995 0.0027954299 0.002801029943 0.00280652009 0.002811880084 0.002817129949 0.002822249895 0.002827259945 0.002832150087 0.002836920088 0.002841569949 0.002846099902 0.002850509947 0.002854800085 0.002858970081 0.002863029949 0.002866959898 0.00287077995 0.002874470083 0.002878050087 0.00288150995 0.002884839894 0.002888059942 0.002891160082 0.002894140081 0.00289699994 0.002899749903 0.002902369946 0.002904870082 0.002907260088 0.002909519942 0.002911669901 0.002913689939 0.002915600082 0.002917390084 0.002919059945 0.002920609899 0.002922039945 0.002923350083 0.00292454008 0.002925619949 0.002926569898 0.002927399939 0.002928120084 0.002928720089 0.002929189941 0.002929549897 0.002929789945 0.002929910086108161617B0 0 0.00024 0.00098 0.00244 0.00488 0.00855 0.01367 0.02051 0.0293 0.03931 0.04981 0.06006 0.06934 0.0769 0.08203 0.08398102667B0.00024 0.00293 0.01611 0.05371 0.12085 0.19336 0.225591025050101A-1e-06 -6e-06 -3e-06 -4.3e-05 -5e-06 1e-06 2.3e-05 1e-06 -5.3e-05 -0.000367 0.000376 0.000854 3.1e-05 -0.001276 -0.000911 0.001277 0.002152 -0.000462 -0.003338 -0.001409 0.003771 0.004194 -0.002643 -0.007201 -0.000644 0.009184 0.006084 -0.008578 -0.01274 0.003982 0.018626 0.005205 -0.020941 -0.018163 0.016667 0.032245 -0.003466 -0.042953 -0.019326 0.044309 0.049791 -0.029416 -0.082608 -0.009342 0.107552 0.08166 -0.10311 -0.204208 -3.1e-05 0.390433 0.589958 0.390433 -3.1e-05 -0.204208 -0.10311 0.08166 0.107552 -0.009342 -0.082608 -0.029416 0.049791 0.044309 -0.019326 -0.042953 -0.003466 0.032245 0.016667 -0.018163 -0.020941 0.005205 0.018626 0.003982 -0.01274 -0.008578 0.006084 0.009184 -0.000644 -0.007201 -0.002643 0.004194 0.003771 -0.001409 -0.003338 -0.000462 0.002152 0.001277 -0.000911 -0.001276 3.1e-05 0.000854 0.000376 -0.000367 -0.00041 2.5e-05 0.000262 0.000121 -0.0001 -0.000162 -9.8e-05 -2.9e-05 -3e-061010600600491A-0.000360027 0.000344217 0.00100482 0.00157039 0.00199825 0.00225762 0.00233175 0.00221896 0.00193244 0.00149883 0.000955835 0.000348985 -0.000272125 -0.00085789 -0.00136256 -0.00174788 -0.00198586 -0.00206083 -0.00197038 -0.00172529 -0.00134835 -0.000872359 -0.000337228 0.000213265 0.000735044 0.00118731 0.00153558 0.0017544 0.00182912 0.00175685 0.00154641 0.00121742 0.000798509 0.000324845 -0.000164792 -0.000631154 -0.00103767 -0.00135326 -0.00155473 -0.0016284 -0.00157106 -0.00138995 -0.00110202 -0.000732332 -0.000311941 0.000124678 0.000542492 0.000908658 0.00119513 0.00138073 0.00145274 0.00140776 0.0012517 0.000999227 0.000672355 0.000298589 -9.14037e-05 -0.000466302 -0.000796571 
-0.00105685 -0.00122786 -0.00129774 -0.00126301 -0.00112849 -0.000906885 -0.000617564 -0.000284895 6.38068e-05 0.000400484 0.000698596 0.000935182 0.00109266 0.00116007 0.00113386 0.00101796 0.000823379 0.000567168 0.000270918 -4.09782e-05 -0.000343427 -0.000612542 -0.000827566 -0.000972494 -0.00103715 -0.00101802 -0.000918284 -0.000747457 -0.000520572 -0.000256777 2.21878e-05 0.000293836 0.00053671 0.000732049 0.000865251 0.000926957 0.000913695 0.000827998 0.000678152 0.000477329 0.000242546 -6.83963e-06 -0.000250682 -0.000469729 -0.000647008 -0.000769258 -0.000827864 -0.000819445 -0.000745997 -0.000614673 -0.000437096 -0.000228316 -5.57303e-06 0.000213116 0.000410452 0.000571147 0.000683159 0.000738576 0.000734106 0.000671327 0.000556424 0.00039959 0.000214159 1.54674e-05 -0.000180423 -0.000357956 -0.000503406 -0.000605837 -0.000657976 -0.000656724 -0.000603259 -0.000502899 -0.000364602 -0.000200167 -2.31862e-05 0.000151992 0.000311464 0.000442863 0.000536308 0.000585184 0.00058648 0.000541121 0.000453666 0.000331953 0.000186384 2.90424e-05 -0.00012733 -0.000270292 -0.000388756 -0.000473812 -0.000519395 -0.000522718 -0.000484407 -0.000408381 -0.000301495 -0.000172898 -3.33041e-05 0.000105992 0.000233874 0.000340432 0.000417635 0.000459984 0.000464857 0.000432655 0.000366747 0.000273108 0.000159785 3.61949e-05 -8.75741e-05 -0.000201702 -0.000297308 -0.00036718 -0.000406355 -0.00041236 -0.000385448 -0.000328496 -0.000246689 -0.00014706 -3.79533e-05 7.17491e-05 0.00017333 0.000258863 0.000321925 0.000357985 0.000364795 0.000342458 0.000293404 0.000222117 0.000134811 3.8743e-05 -5.82337e-05 -0.000148371 -0.00022468 -0.000281394 -0.000314444 -0.000321761 -0.000303343 -0.000261247 -0.000199348 -0.000123039 -3.87281e-05 4.67151e-05 0.000126466 0.000194311 0.000245139 0.000275314 0.000282899 0.000267833 0.000231832 0.000178277 0.000111818 3.80576e-05 -3.69847e-05 -0.000107303 -0.000167429 -0.000212818 -0.000240207 -0.000247866 -0.000235647 -0.00020501 -0.000158831 -0.000101164 -3.68506e-05 2.88188e-05 9.05991e-05 0.000143677 0.000184044 0.000208795 0.000216365 0.00020656 0.000180602 0.00014095 9.10908e-05 3.52412e-05 -2.2009e-05 -7.60853e-05 -0.000122756 -0.000158504 -0.000180766 -0.000188112 -0.000180349 -0.000158444 -0.000124544 -8.16137e-05 -3.33041e-05 1.64062e-05 6.35237e-05 0.000104398 0.000135913 0.000155821 0.000162855 0.000156775 0.000138402 0.000109538 7.27475e-05 3.11434e-05 -1.18315e-05 -5.27203e-05 -8.83341e-05 -0.000115991 -0.000133708 -0.000140339 -0.000135645 -0.000120327 -9.58741e-05 -6.44922e-05 -2.88337e-05 8.13603e-06 4.34518e-05 7.43419e-05 9.84818e-05 0.000114143 0.000120327 0.00011678 0.000104085 8.34763e-05 5.6833e-05 2.64347e-05 -5.21541e-06 -3.55393e-05 -6.21974e-05 -8.31634e-05 -9.69321e-05 -0.000102609 -0.000100002 -8.95411e-05 -7.22855e-05 -4.97848e-05 -2.40058e-05 2.93553e-06 2.88486e-05 5.1707e-05 6.9797e-05 8.18223e-05 8.70079e-05 8.51303e-05 7.65771e-05 6.21974e-05 4.33326e-05 2.15918e-05 -1.19209e-06 -2.32011e-05 -4.26918e-05 -5.82039e-05 -6.86198e-05 -7.32839e-05 -7.20024e-05 -6.50585e-05 -5.31673e-05 -3.74317e-05 -1.92374e-05 -7.45058e-08 1.84774e-05 3.4973e-05 4.81904e-05 5.7146e-05 6.13034e-05 6.04689e-05 5.48661e-05 4.51058e-05 3.20971e-05 1.69575e-05 9.83477e-07 -1.45584e-05 -2.84165e-05 -3.95775e-05 -4.72218e-05 -5.08726e-05 -5.03808e-05 -4.59105e-05 -3.79682e-05 -2.72691e-05 -1.47969e-05 -1.56462e-06 1.13249e-05 2.28733e-05 3.22163e-05 3.86834e-05 4.18574e-05 4.1604e-05 3.80725e-05 3.16501e-05 2.29627e-05 1.27554e-05 1.92225e-06 -8.68738e-06 
-1.82092e-05 -2.59578e-05 -3.13669e-05 -3.4079e-05 -3.40044e-05 -3.12477e-05 -2.61217e-05 -1.91182e-05 -1.08629e-05 -2.07126e-06 6.55651e-06 1.43349e-05 2.06828e-05 2.51383e-05 2.7433e-05 2.74777e-05 2.53469e-05 2.12938e-05 1.57207e-05 9.11951e-06 2.07126e-06 -4.85778e-06 -1.11163e-05 -1.62423e-05 -1.98781e-05 -2.18004e-05 -2.19047e-05 -2.02805e-05 -1.71214e-05 -1.27405e-05 -7.53999e-06 -1.96695e-06 3.51667e-06 8.49366e-06 1.25617e-05 1.54823e-05 1.7032e-05 1.7181e-05 1.59591e-05 1.35303e-05 1.01477e-05 6.10948e-06 1.78814e-06 -2.47359e-06 -6.34789e-06 -9.53674e-06 -1.18166e-05 -1.30534e-05 -1.32024e-05 -1.23084e-05 -1.04755e-05 -7.91252e-06 -4.84288e-06 -1.54972e-06 1.69873e-06 4.63426e-06 7.06315e-06 8.80659e-06 9.77516e-06 9.90927e-06 9.25362e-06 7.89762e-06 6.00517e-06 3.74019e-06 1.2964e-06 -1.10269e-06 -3.29316e-06 -5.0813e-06 -6.3777e-06 -7.09295e-06 -7.19726e-06 -6.73532e-06 -5.76675e-06 -4.41074e-06 -2.77162e-06 -1.02818e-06 6.85453e-07 2.23517e-06 3.51667e-06 4.42564e-06 4.93228e-06 5.02169e-06 4.69387e-06 4.02331e-06 3.08454e-06 1.96695e-06 7.7486e-07 -3.8743e-07 -1.44541e-06 -2.30968e-06 -2.92063e-06 -3.24845e-06 -3.29316e-06 -3.08454e-06 -2.63751e-06 -2.02656e-06 -1.3113e-06 -5.36442e-07 2.08616e-07 8.64267e-07 1.40071e-06 1.77324e-06 1.96695e-06 1.99676e-06 1.84774e-06 1.57952e-06 1.20699e-06 7.7486e-07 3.27826e-07 -8.9407e-08 -4.61936e-07 -7.45058e-07 -9.53674e-07 -1.04308e-06 -1.04308e-06 -9.53674e-07 -8.04663e-07 -6.10948e-07 -3.8743e-07 -1.78814e-07 2.98023e-08 1.93715e-07 3.27826e-07 4.02331e-07 4.32134e-07 4.17233e-07 3.72529e-07 2.98023e-07 2.23517e-07 1.3411e-07 5.96046e-08 0 -5.96046e-08 -8.9407e-08 -8.9407e-08 -8.9407e-08 -8.9407e-08 -5.96046e-08 -4.47035e-08 -2.98023e-08 -1.49012e-08 0 0 0 0 0102250250251B0 0 0 -5.96046e-08 0 5.96046e-08 1.19209e-07 -1.19209e-07 -2.38419e-07 1.19209e-07 4.17233e-07 0 -5.96046e-07 -2.38419e-07 7.15256e-07 5.96046e-07 -7.15256e-07 -1.01328e-06 5.96046e-07 1.54972e-06 -1.78814e-07 -2.02656e-06 -4.76837e-07 2.32458e-06 1.43051e-06 -2.32458e-06 -2.563e-06 1.84774e-06 3.69549e-06 -9.53674e-07 -4.70877e-06 -5.36442e-07 5.36442e-06 2.44379e-06 -5.42402e-06 -4.76837e-06 4.64916e-06 7.09295e-06 -2.98023e-06 -9.23872e-06 2.98023e-07 1.06692e-05 3.27826e-06 -1.10865e-05 -7.51019e-06 1.01328e-05 1.19209e-05 -7.51019e-06 -1.60336e-05 3.09944e-06 1.91331e-05 2.86102e-06 -2.06232e-05 -1.00136e-05 1.98483e-05 1.78218e-05 -1.63913e-05 -2.53916e-05 9.95398e-06 3.15905e-05 -7.15256e-07 -3.54648e-05 -1.07884e-05 3.60012e-05 2.37226e-05 -3.22461e-05 -3.67761e-05 2.37823e-05 4.84586e-05 -1.06692e-05 -5.70416e-05 -6.67572e-06 6.07967e-05 2.69413e-05 -5.83529e-05 -4.8399e-05 4.8697e-05 6.87838e-05 -3.14713e-05 -8.55923e-05 7.21216e-06 9.62019e-05 2.27094e-05 -9.80496e-05 -5.60284e-05 8.94666e-05 8.95858e-05 -6.93202e-05 -0.000119746 3.77297e-05 0.000142455 3.8743e-06 -0.00015384 -5.26309e-05 0.00015074 0.000104606 -0.000130892 -0.000154614 9.35197e-05 0.000196993 -3.96967e-05 -0.000225782 -2.7597e-05 0.000235677 0.000103354 -0.000222504 -0.000181019 0.00018394 0.000252545 -0.000119925 -0.000309587 3.31402e-05 0.00034374 7.08699e-05 -0.000347972 -0.000184 0.000317276 0.000295818 -0.000249445 -0.000394642 0.000145972 0.000468433 -1.21593e-05 -0.000505924 -0.000142694 0.000497997 0.000305951 -0.000438929 -0.000462115 0.000327587 0.000594556 -0.000167727 -0.000686526 -3.12328e-05 0.000723302 0.00025475 -0.000693619 -0.000483751 0.000591576 0.000696242 -0.000417531 -0.000868976 0.00017941 0.000979543 0.000107765 -0.00100905 -0.000421941 0.000944138 0.000735819 
-0.000779212 -0.00101858 0.000517726 0.0012387 -0.000173092 -0.00136691 -0.000231445 0.00137943 0.000663996 -0.00126094 -0.00108612 0.00100684 0.00145584 -0.000625372 -0.00173122 0.000137866 0.00187451 0.000421405 -0.00185633 -0.00100756 0.00165927 0.00156766 -0.0012812 -0.00204533 0.000736833 0.0023855 -5.84722e-05 -0.00253981 -0.000705183 0.00247216 0.00149184 -0.00216305 -0.00222987 0.00161368 0.00284415 -0.000847816 -0.00326264 -8.78572e-05 0.00342327 0.0011254 -0.00328088 -0.0021798 0.00281328 0.00315464 -0.00202572 -0.00394988 0.000953376 0.00447023 0.000338376 -0.00463438 -0.00175661 0.0043838 0.00318593 -0.00369054 -0.00449616 0.00256276 0.00555199 -0.00104851 -0.00622416 -0.000764191 0.00640082 0.0027501 -0.00599933 -0.00475281 0.00497586 0.006594 -0.00333369 -0.00808483 0.00112844 0.00903958 0.00153047 -0.0092895 -0.00448132 0.00869662 0.00751561 -0.00716639 -0.0103864 0.00465751 0.0128189 -0.00118887 -0.0145211 -0.00315785 0.0151942 0.00823975 -0.0145367 -0.0138587 0.0122386 0.0197716 -0.00795406 -0.025704 0.00122654 0.0313665 0.00870252 -0.0364729 -0.0234254 0.040758 0.0471818 -0.0439948 -0.0959132 0.0460098 0.314862 0.45330640000001001B1101581.99981.99983B-4.047908e-10 -1.390291e-10 6.728001e-10 2.757972e-09 7.546507e-09 1.766681e-08 3.769363e-08 7.523132e-08 1.424254e-07 2.57999e-07 4.499037e-07 7.586555e-07 1.241386e-06 1.97658e-06 3.069389e-06 4.657284e-06 6.915693e-06 1.00631e-05 1.436487e-05 2.013499e-05 2.773459e-05 3.756609e-05 5.006174e-05 6.566517e-05 8.480489e-05 0.0001078587 0.0001351084 0.0001666851 0.0002025053 0.0002421999 0.000285038 0.0003298504 0.000374956 0.0004180986 0.0004564003 0.0004863386 0.0005037566 0.0005039131 0.0004815801 0.0004311946 0.000347068 0.0002236552 5.588177e-05 -0.0001604755 -0.0004283654 -0.0007490084 -0.001121432 -0.001542013 -0.002004062 -0.002497462 -0.003008406 -0.00351925 -0.004008517 -0.00445107 -0.004818468 -0.005079526 -0.005201073 -0.005148892 -0.004888844 -0.004388128 -0.003616642 -0.002548397 -0.00116293 0.0005533497 0.002605953 0.004991292 0.007695918 0.01069608 0.01395761 0.01743625 0.02107831 0.02482173 0.02859758 0.03233183 0.0359474 0.03936644 0.04251273 0.04531409 0.04770474 0.04962748 0.05103565 0.05189471 0.052183451010939394B8.46923e-10 2.27422e-09 4.538951e-09 6.873791e-09 7.10904e-09 7.627841e-10 -1.98301e-08 -6.609942e-08 -1.531138e-07 -2.982495e-07 -5.181519e-07 -8.236356e-07 -1.212476e-06 -1.660555e-06 -2.112474e-06 -2.473531e-06 -2.605619e-06 -2.330134e-06 -1.44098e-06 2.698043e-07 2.972714e-06 6.748858e-06 1.152962e-05 1.703543e-05 2.272479e-05 2.776698e-05 3.105368e-05 3.126302e-05 2.698613e-05 1.691886e-05 1.111765e-07 -2.374559e-05 -5.402672e-05 -8.892268e-05 -0.0001252819 -0.0001586044 -0.0001832359 -0.000192798 -0.0001808636 -0.0001418502 -7.206699e-05 2.919376e-05 0.0001586655 0.0003083858 0.0004654217 0.0006121983 0.0007275471 0.0007885116 0.0007728578 0.000662123 0.000444926 0.0001201614 -0.0003003695 -0.0007903605 -0.001308874 -0.00180202 -0.002206618 -0.00245579 -0.002486192 -0.002246312 -0.001705031 -0.0008594633 0.0002590112 0.001581566 0.003002498 0.004384142 0.005566316 0.006380002 0.006664402 0.006286043 0.005158141 0.003258199 0.0006416821 -0.002550179 -0.006090065 -0.009672703 -0.01293168 -0.01546425 -0.01686256 -0.01674898 -0.01481246 -0.01084255 -0.004757783 0.003374886 0.01333183 0.02473738 0.03708165 0.0497507 0.06206696 0.07333685 0.08290152 0.0901866 0.09474635 0.09629848102111111112B-2.487704e-10 4.73744e-09 1.240319e-08 2.18423e-09 -2.973504e-08 -2.774098e-08 4.823501e-08 9.04852e-08 
-4.377202e-08 -2.029251e-07 -2.93251e-08 3.576771e-07 2.380756e-07 -5.050541e-07 -6.566606e-07 5.295136e-07 1.333027e-06 -2.353909e-07 -2.234558e-06 -6.431887e-07 3.178848e-06 2.391894e-06 -3.766426e-06 -5.213131e-06 3.343879e-06 9.069842e-06 -1.034203e-06 -1.349476e-05 -4.128203e-06 1.740814e-05 1.293247e-05 -1.901116e-05 -2.55982e-05 1.583847e-05 4.129854e-05 -5.051362e-06 -5.769434e-05 -1.597343e-05 7.06232e-05 4.876212e-05 -7.411825e-05 -9.265601e-05 6.092806e-05 0.0001437042 -2.365713e-05 -0.0001938177 -4.346068e-05 0.0002305286 0.0001422997 -0.0002376966 -0.0002684195 0.00019742 0.0004089862 -9.322642e-05 -0.0005416667 -8.56506e-05 0.0006351459 0.0003394235 -0.0006517938 -0.0006531623 0.000552734 0.0009935334 -0.000305133 -0.00130802 -0.0001089867 0.001527642 0.0006836461 -0.001573808 -0.00138119 0.001369316 0.002127841 -0.0008527391 -0.002814326 -5.435981e-06 0.003302926 0.001187235 -0.003441493 -0.002615018 0.003083861 0.004145541 -0.002114933 -0.005572669 0.0004775432 0.006640171 0.001802538 -0.00706464 -0.004597381 0.00656694 0.007666904 -0.00490895 -0.01066029 0.001931064 0.01312888 0.002414871 -0.01454758 -0.008042475 0.01433728 0.01472206 -0.01187359 -0.02208895 0.00645106 0.0296694 0.002871719 -0.03692219 -0.01777635 0.04329151 0.04248361 -0.04826485 -0.09280869 0.05142858 0.3137774 0.447485810.0052250250251B1.8199372e-09 2.0202708e-09 -1.6974796e-09 -2.7906431e-09 1.7345323e-09 4.1969239e-09 -1.6853792e-09 -6.3909467e-09 1.2389344e-09 9.4915949e-09 -0 -1.3562537e-08 -2.5275546e-09 1.8577368e-08 6.9514622e-09 -2.4372031e-08 -1.3990807e-08 3.0585518e-08 2.4459706e-08 -3.6591251e-08 -3.9229615e-08 4.1423156e-08 5.9165161e-08 -4.3702295e-08 -8.5029034e-08 4.1571909e-08 1.1735267e-07 -3.2650685e-08 -1.5627138e-07 1.4015944e-08 2.0132505e-07 1.7770057e-08 -2.5122895e-07 -6.6576578e-08 3.0362335e-07 1.3654938e-07 -3.5481486e-07 -2.3187093e-07 3.9952804e-07 3.5642505e-07 -4.3069065e-07 -5.1335869e-07 4.3928098e-07 7.0453973e-07 -4.1427025e-07 -9.299164e-07 3.4269613e-07 1.1867936e-06 -2.0990502e-07 -1.4690514e-06 1.5778578e-21 1.7663441e-06 3.0347226e-07 -2.0633282e-06 -7.1656915e-07 2.3389837e-06 1.2537186e-06 -2.5661009e-06 -1.926203e-06 2.7110178e-06 2.7403778e-06 -2.7336969e-06 -3.6956646e-06 2.5882353e-06 4.7823814e-06 -2.2238961e-06 -5.9795051e-06 1.5867422e-06 7.252483e-06 -6.2193424e-07 -8.5512438e-06 -7.2326814e-07 9.8085791e-06 2.4957987e-06 -1.0939091e-05 -4.7324062e-06 1.183891e-05 7.4546907e-06 -1.2386398e-05 -1.0663696e-05 1.2444033e-05 1.4334252e-05 -1.1861668e-05 -1.8409337e-05 1.0481309e-05 2.2794756e-05 -8.1434895e-06 -2.7354526e-05 4.6953035e-06 3.1907369e-05 -8.0224322e-19 -3.6224742e-05 -6.0519879e-06 4.0030883e-05 1.3530859e-05 -4.3005275e-05 -2.2454486e-05 4.478796e-05 3.2775174e-05 -4.4988014e-05 -4.4366774e-05 4.3195426e-05 5.701285e-05 -3.8996491e-05 -7.0396723e-05 3.1992661e-05 8.4094262e-05 -2.1822623e-05 -9.7570314e-05 8.187167e-06 0.00011017967 9.1238133e-06 -0.00012117337 -3.0203071e-05 0.00012971111 5.4997605e-05 -0.00013488027 -8.3281406e-05 0.00013572187 0.00011463083 -0.0001312636 -0.00014840422 0.00012055959 0.00018372736 -0.00010273636 -0.00021948666 7.7043899e-05 0.00025433136 -4.291058e-05 -0.00028668656 5.3159825e-18 0.00031477802 5.1732295e-05 -0.00033666971 -0.00011198474 0.00035031454 0.00018006107 -0.00035361821 -0.00025482989 0.00034451546 0.00033469556 -0.00032105758 -0.00041758329 0.00028150912 0.00050094114 -0.00022445126 -0.00058176149 0.0001488886 0.00065662374 -5.4355677e-05 -0.00072175978 -5.8981361e-05 0.00077314261 0.00019023226 
[Inventory data, extraction-garbled: the XML markup of the inventory file was lost and element values run together. Recoverable content is summarized below.]

Instrument response stages: several FIR decimation-filter coefficient lists (a few hundred floating-point taps each, many symmetric), each followed by stage parameters (gains, decimation factors, sample rates) that are fused with the coefficients and cannot be reliably separated here.

Network TX — Texas Seismological Network (TexNet); epoch start 2016-01-01T00:00:00Z; identifier DOI 10.7914/SN/TX. Station coordinates and elevations are present in the original but fused with adjacent numeric fields, so only station name, epoch start, and sensor/datalogger description are listed:
- Alpine; 2017-02-02; Trillium 120 Posthole S/N 0545, Centaur S/N 1792
- Aspermont; 2016-12-16; Trillium 120 Posthole S/N 0543
- Brady; 2017-02-15; Trillium 120 Posthole S/N 533, Centaur S/N 1779
- Carthage; 2017-09-28; Trillium 20s Compact S/N 4099, Centaur S/N 1630
- Dickens; 2016-12-18; Trillium 120 Posthole S/N 0531, Centaur S/N 1778
- Del Rio; 2017-03-17; Trillium 20s Compact S/N 4140, Centaur S/N 1710
- Darrouzett; 2017-08-09; Trillium 120 Posthole S/N 0529, Centaur S/N 1774
- Christine; 2019-01-24; Guralp 40T
- Hobson; 2018-06-08; Trillium 20s Compact S/N 4129, Centaur S/N 1665
- Eagle Ford 04; 2017-12-19; Trillium 20s Compact S/N 4116, Centaur S/N 1657
- Yorktown; 2018-11-08; Trillium Compact 20s S/N 4131, Centaur S/N 0191
- Chireno; 2019-06-07; Trillium Compact 20s S/N 4149, Centaur S/N 1798
- Dallas Executive Airport; 2016-10-20; Trillium 20s Compact S/N 4118, Centaur S/N 1754
- City of Haslet; 2016-09-22; Trillium 20s Compact S/N 4139, Centaur S/N 1755
- Perrin-Whitt Elementary School; 2016-10-17; Trillium 20s Compact S/N 4098, Centaur S/N 1756
- University of Dallas; 2016-09-29; Trillium 20s Compact S/N 4117, Centaur S/N 1757
- UT at Dallas; 2016-11-16; Trillium 20s Compact S/N 4111, Centaur S/N 1758
- Azle; 2016-10-20; Trillium 20s Compact S/N 4108, Centaur S/N 1762
- Weatherford; 2016-10-19; Trillium 20s Compact S/N 4109, Centaur S/N 1765
- Farmers Branch; 2016-09-30; Trillium 20s Compact S/N 4107, Centaur S/N 1763
- UT at Arlington; 2016-10-20; Trillium 20s Compact S/N 4109, Centaur S/N 1765
- Venus; 2016-09-22; Trillium 20s Compact S/N 4112, Centaur S/N 1760
- Cleburne; 2016-11-14; Trillium 120s Compact, Centaur S/N 0237
- Alvarado; 2016-11-14; Trillium 120s Compact S/N 2360, Centaur S/N 0191
- Texas Christian University; 2017-04-12; Trillium Compact S/N 2389, Centaur S/N 0249
- Waxahatchie; 2019-06-07; Guralp 40T S/N T43879, Minimus S/N MIN-C659
- Hebbronville; 2017-02-15; Trillium 120 Posthole S/N 539, Centaur S/N 1775
- Hondo; 2017-02-15; Trillium 120 Posthole S/N 535, Centaur S/N 1692
- Huntsville, TX; 2017-04-06; Trillium 20s Compact S/N 4138, Centaur S/N 1716
- Midkiff; 2019-03-01; Guralp CMG-3TD
- Rankin; 2018-11-16; Guralp CMG-3TD
- Seminole; 2018-11-16; Guralp CMG-3TD
- Klondike; 2019-03-01; Guralp CMG-3TD
- Greenwood; 2019-02-28; Guralp CMG-3TD
- West Midland; 2019-07-25; Guralp CMG-3TD
- Lenorah; 2021-01-11; Guralp CMG-40T S/N T43885, Minimus S/N MINP-015A
- Muldoon; 2017-08-03; Trillium 120 Posthole S/N 0526, Centaur S/N 1791
- Monahans; 2017-08-02; Trillium 120 Posthole S/N 0527, Centaur S/N 1781
- Odessa; 2016-12-17; Trillium 120 Posthole S/N 0537, Centaur S/N 1655
- Ozona; 2017-02-08; Trillium 120 Posthole S/N 0536, Centaur S/N 1788
- Permian Basin 1; 2017-09-13; Trillium 20s Compact S/N 4132, Centaur S/N 1663
- Crockett Middle School (Pecos); 2017-01-27; Guralp 40T, Minimus+ MINP-005A
- Saragosa; 2017-11-11; Trillium 20s Compact S/N 4147, Centaur S/N 1798
- Verhalen; 2019-02-12; Trillium 20s Compact S/N 4101, Centaur S/N 1628
- Hovey; 2017-11-10; (entry continues below)
Rd30.91987-103.324783957falsetrue2017-11-10T00:00:00.0000Z30.91987-103.3247839572017-11-10T00:00:00.0000Zxxxx1yyyy110010002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4148, Centaur S/N 1767"}FDSNXML:Sensor2017-11-10T00:00:00.0000Zxxxx2yyyy2100109002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4148, Centaur S/N 1767"}FDSNXML:Sensor2017-11-10T00:00:00.0000Zxxxx0yyyy0100100-902989930001M/STfalse{"description":"Trillium 20s Compact S/N 4148, Centaur S/N 1767"}FDSNXML:Sensor2017-10-11T00:00:00.0000ZPermian Basin 631.6472-103.218246831falsetrue2017-10-11T00:00:00.0000Z31.6472-103.2182468312017-10-11T00:00:00.0000Zxxxx1yyyy110010002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4121, Centaur S/N 1624"}FDSNXML:Sensor2017-10-11T00:00:00.0000Zxxxx2yyyy2100109002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4121, Centaur S/N 1624"}FDSNXML:Sensor2017-10-11T00:00:00.0000Zxxxx0yyyy0100100-902989930001M/STfalse{"description":"Trillium 20s Compact S/N 4121, Centaur S/N 1624"}FDSNXML:Sensor2017-10-11T00:00:00.0000ZMentone31.57935-103.66793856falsetrue2020-06-05T00:00:00.0000Z31.57935-103.667938562020-06-05T00:00:00.0000Zxxxx1yyyy110010003012400001m/sTfalse{"description":"Centaur S/N 1764, Silicon Audio 203P-15"}FDSNXML:Sensor2020-06-05T00:00:00.0000Zxxxx2yyyy2100109003012400001m/sTfalse{"description":"Centaur S/N 1764, Silicon Audio 203P-15"}FDSNXML:Sensor2020-06-05T00:00:00.0000Zxxxx0yyyy0100100-903012400001m/sTfalse{"description":"Centaur S/N 1764, Silicon Audio 203P-15"}FDSNXML:Sensor2017-05-25T00:00:00.0000ZPermian Basin 0830.891741-102.907356926falsetrue2017-05-25T00:00:00.0000Z30.891741-102.9073569262017-05-25T00:00:00.0000Zxxxx1yyyy110010002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4110, Centaur S/N 1662"}FDSNXML:Sensor2017-05-25T00:00:00.0000Zxxxx2yyyy2100109002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4110, Centaur S/N 1662"}FDSNXML:Sensor2017-05-25T00:00:00.0000Zxxxx0yyyy0100100-902989930001M/STfalse{"description":"Trillium 20s Compact S/N 4110, Centaur S/N 1662"}FDSNXML:Sensor2019-05-22T00:00:00.0000ZCulberson County31.774145-104.3014441139falsetrue2019-05-22T00:00:00.0000Z31.774145-104.30144411392019-05-22T00:00:00.0000Zxxxx2yyyy2100109003260940001m/sGCfalse{"description":"Guralp 40T S/N T43896, Minimus+ S/N MINP-035A"}FDSNXML:Sensor2019-05-22T00:00:00.0000Zxxxx1yyyy110010003261120001m/sGCfalse{"description":"Guralp 40T S/N T43896, Minimus+ S/N MINP-035A"}FDSNXML:Sensor2019-05-22T00:00:00.0000Zxxxx0yyyy0100100-903278510001m/sGfalse{"description":"Guralp 40T S/N T43896, Minimus+ S/N MINP-035A"}FDSNXML:Sensor2018-10-17T00:00:00.0000ZSoutheast of Toyah31.283607-103.754585858falsetrue2018-10-17T00:00:00.0000Z31.283607-103.7545858582018-10-17T00:00:00.0000Zxxxx1yyyy110010002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4127, Centaur S/N 1765"}FDSNXML:Sensor2018-10-17T00:00:00.0000Zxxxx2yyyy2100109002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4127, Centaur S/N 1765"}FDSNXML:Sensor2018-10-17T00:00:00.0000Zxxxx0yyyy0100100-902989930001M/STfalse{"description":"Trillium 20s Compact S/N 4127, Centaur S/N 1765"}FDSNXML:Sensor2018-05-03T00:00:00.0000ZChina Draw31.93553-104.034119915falsetrue2018-05-03T00:00:00.0000Z31.93553-104.0341199152018-05-03T00:00:00.0000Zxxxx1yyyy110010002989930001M/SCfalse{"description":"Trillium 20s Compact S/N 4120, Centaur S/N 1759"}FDSNXML:Sensor2018-05-03T00:00:00.0000Zxxxx2yyyy2100109002989930001M/SCfalse{"description":"Trillium 20s Compact S/N 4120, Centaur S/N 
1759"}FDSNXML:Sensor2018-05-03T00:00:00.0000Zxxxx0yyyy0100100-902989930001M/SCfalse{"description":"Trillium 20s Compact S/N 4120, Centaur S/N 1759"}FDSNXML:Sensor2018-11-13T00:00:00.0000ZSouthwest of Toyah31.21324-103.957703961.6falsetrue2019-03-13T19:40:56.175Z31.21324-103.957703961.62019-03-13T19:40:56.175Zxxxx2yyyy220010900294032000.005m/sGfalse{"description":"Guralp 40T, Minimus MIN-C559 S/N 50521"}FDSNXML:Sensor2019-03-13T19:40:56.175Zxxxx1yyyy12001000292562000.005m/sGfalse{"description":"Guralp 40T, Minimus MIN-C559 S/N 50521"}FDSNXML:Sensor2019-03-13T19:40:56.175Zxxxx0yyyy0200100-90294032000.005m/sGfalse{"description":"Guralp 40T, Minimus MIN-C559 S/N 50521"}FDSNXML:Sensor2019-05-25T00:00:00.0000ZReeves - Culberson Border31.554213-103.845879956.1falsetrue2019-05-25T00:00:00.0000Z31.554213-103.845879956.12019-05-25T00:00:00.0000Zxxxx2yyyy2200109003276770001M/SGCfalse{"description":"Guralp 40T S/N T43883, Minimus S/N MIN-C959"}FDSNXML:Sensor2019-05-25T00:00:00.0000Zxxxx1yyyy120010003278650001M/SGCfalse{"description":"Guralp 40T S/N T43883, Minimus S/N MIN-C959"}FDSNXML:Sensor2019-05-25T00:00:00.0000Zxxxx0yyyy0200100-903278530001M/SGfalse{"description":"Guralp 40T S/N T43883, Minimus S/N MIN-C959"}FDSNXML:Sensor2019-08-22T04:00:00.0000ZWeatherby Road31.12932-103.1511807falsetrue2019-08-22T04:00:00.0000Z31.129347-103.1511388402019-08-22T04:00:00.0000Zxxxx2yyyy2200109004098011M/SGfalse{"description":"1CMG-40T S/N T43837, Minimus+ S/N MINP-025A"}FDSNXML:Sensor2019-08-22T04:00:00.0000Zxxxx1yyyy120010004099071M/SGfalse{"description":"1CMG-40T S/N T43837, Minimus+ S/N MINP-025A"}FDSNXML:Sensor2019-08-22T04:00:00.0000Zxxxx0yyyy0200100-904097991M/SGfalse{"description":"1CMG-40T S/N T43837, Minimus+ S/N MINP-025A"}FDSNXML:Sensor2019-06-28T01:00:00.0000ZSouthwest of Coyonosa31.211399-103.084389825falsetrue2019-06-28T01:00:00.0000Z31.211399-103.0843898252019-06-28T01:00:00.0000Zxxxx1yyyy11001000CGfalse{"description":"Guralp CMG-6TD S/N - T6A94/C1146"}FDSNXML:Sensor2019-06-28T01:00:00.0000Zxxxx2yyyy21001090022385300000.05M/SCGfalse{"description":"Guralp CMG-6TD S/N - T6A94/C1146"}FDSNXML:Sensor2019-06-28T01:00:00.0000Zxxxx0yyyy0100100-9022385300000.05M/SCGfalse{"description":"Guralp CMG-6TD S/N - T6A94/C1146"}FDSNXML:Sensor2019-02-13T00:00:00.0000ZCoyonosa31.124966-103.251984848falsetrue2019-02-13T00:00:00.0000Z31.124966-103.2519848482019-02-13T00:00:00.0000Zxxxx1yyyy110010002989930001M/SCfalse{"description":"Trillium 20s Compact S/N 4130, Centaur S/N 1659"}FDSNXML:Sensor2019-02-13T00:00:00.0000Zxxxx2yyyy2100109002989930001M/SCfalse{"description":"Trillium 20s Compact S/N 4130, Centaur S/N 1659"}FDSNXML:Sensor2019-02-13T00:00:00.0000Zxxxx0yyyy0100100-902989930001M/SCfalse{"description":"Trillium 20s Compact S/N 4130, Centaur S/N 1659"}FDSNXML:Sensor2020-02-20T20:00:00.0000ZFort Stockton30.99675-103.15182734falsetrue2020-02-20T20:00:00.0000Z30.99675-103.151827342020-02-20T20:00:00.0000Zxxxx1yyyy1100100024489800001m/sGCfalse{"description":"unknown"}FDSNXML:Sensor2020-02-20T20:00:00.0000Zxxxx2yyyy21001090024489800001m/sGCfalse{"description":"unknown"}FDSNXML:Sensor2020-02-20T20:00:00.0000Zxxxx0yyyy0100100-9024489800001m/sGCfalse{"description":"unknown"}FDSNXML:Sensor2019-06-28T01:00:00.0000ZSouthwest of Coyonosa31.20084-103.1996825falsetrue2019-06-28T01:00:00.0000Z31.20084-103.19968252019-06-28T01:00:00.0000Zxxxx1yyyy11001000CGfalse{"description":"Guralp CMG-6TD S/N - T6A82/C1144"}FDSNXML:Sensor2019-06-28T01:00:00.0000Zxxxx2yyyy21001090022385300000.05M/SCGfalse{"description":"Guralp CMG-6TD S/N - 
T6A82/C1144"}FDSNXML:Sensor2019-06-28T01:00:00.0000Zxxxx0yyyy0100100-9022385300000.05M/SCGfalse{"description":"Guralp CMG-6TD S/N - T6A82/C1144"}FDSNXML:Sensor2019-05-24T00:00:00.0000ZWest of Imperial31.303087-103.099701782.4falsetrue2019-05-24T00:00:00.0000Z31.303066-103.099686776.42019-05-24T00:00:00.0000Zxxxx2yyyy2200109003252250001m/sGCfalse{"description":"Guralp 40T S/N T43844"}FDSNXML:Sensor2019-05-24T00:00:00.0000Zxxxx1yyyy120010003261770001m/sGCfalse{"description":"Guralp 40T S/N T43844"}FDSNXML:Sensor2019-05-24T00:00:00.0000Zxxxx0yyyy0200100-903270240001m/sGfalse{"description":"Guralp 40T S/N T43844"}FDSNXML:Sensor2019-09-11T20:00:00.0000ZReeves Ward Border31.341918-103.06216753falsetrue2019-09-11T20:00:00.0000Z31.341918-103.062167532019-09-11T20:00:00.0000Zxxxx1yyyy110010002989930001m/sTfalse{"description":"1Trillium 20s Compact S/N 4109, Centaur S/N 1631"}FDSNXML:Sensor2019-09-11T20:00:00.0000Zxxxx2yyyy2100109002989930001m/sTfalse{"description":"1Trillium 20s Compact S/N 4109, Centaur S/N 1631"}FDSNXML:Sensor2019-09-11T20:00:00.0000Zxxxx0yyyy0100100-902989930001m/sTfalse{"description":"1Trillium 20s Compact S/N 4109, Centaur S/N 1631"}FDSNXML:Sensor2020-09-03T15:00:00.0000ZSouth of Coyonosa31.21173-103.077808United Statesfalsetrue2020-09-03T15:00:00.0000Z31.21173-103.0778082020-09-03T15:00:00.0000ZC11461T6A941100100024489840001M/Sfalse{"type":"Guralp CMG-6T","description":"Guralp CMG-6T","manufacturer":"Guralp","model":"CMG-6T","serialNumber":"T6A94","resourceId":"NRL/Guralp/CMG6T.30_100.2400"}FDSNXML:Sensor{"type":"Guralp CMG-6TD","manufacturer":"Guralp","model":"CMG-6TD","serialNumber":"C1146","resourceId":"NRL/Guralp/CMG_6TD.1.800_200_100.100"}FDSNXML:DataLogger2020-09-03T15:00:00.0000ZC11462T6A9421001090024489840001M/Sfalse{"type":"Guralp CMG-6T","description":"Guralp CMG-6T","manufacturer":"Guralp","model":"CMG-6T","serialNumber":"T6A94","resourceId":"NRL/Guralp/CMG6T.30_100.2400"}FDSNXML:Sensor{"type":"Guralp CMG-6TD","manufacturer":"Guralp","model":"CMG-6TD","serialNumber":"C1146","resourceId":"NRL/Guralp/CMG_6TD.1.800_200_100.100"}FDSNXML:DataLogger2020-09-03T15:00:00.0000ZC11460T6A940100100-9024489840001M/Sfalse{"type":"Guralp CMG-6T","description":"Guralp CMG-6T","manufacturer":"Guralp","model":"CMG-6T","serialNumber":"T6A94","resourceId":"NRL/Guralp/CMG6T.30_100.2400"}FDSNXML:Sensor{"type":"Guralp CMG-6TD","manufacturer":"Guralp","model":"CMG-6TD","serialNumber":"C1146","resourceId":"NRL/Guralp/CMG_6TD.1.800_200_100.100"}FDSNXML:DataLogger2020-03-05T22:00:00.0000ZCulberson South31.66864-104.500771204falsetrue2020-03-05T22:00:00.0000Z31.66864-104.5007712042020-03-05T22:00:00.0000Zxxxx1yyyy110010004810000001m/sTfalse{"description":"Trillium Compact 20s S/N 4132, Centaur S/N 191"}FDSNXML:Sensor2020-03-05T22:00:00.0000Zxxxx2yyyy2100109004810000001m/sTfalse{"description":"Trillium Compact 20s S/N 4132, Centaur S/N 191"}FDSNXML:Sensor2020-03-05T22:00:00.0000Zxxxx0yyyy0100100-904810000001m/sTfalse{"description":"Trillium Compact 20s S/N 4132, Centaur S/N 191"}FDSNXML:Sensor2020-03-06T00:00:00.0000ZCulberson North31.75299-104.5144531242falsetrue2020-03-06T00:00:00.0000Z31.75299-104.51445312422020-03-06T00:00:00.0000Zxxxx1yyyy110010003000000001m/sTfalse{"description":"Silicon Audio 213-40 750 volt"}FDSNXML:Sensor2020-03-06T00:00:00.0000Zxxxx2yyyy2100109003000000001m/sTfalse{"description":"Silicon Audio 213-40 750 volt"}FDSNXML:Sensor2020-03-06T00:00:00.0000Zxxxx0yyyy0100100-903000000001m/sTfalse{"description":"Silicon Audio 213-40 750 
volt"}FDSNXML:Sensor2020-03-04T06:30:00.0000ZPecos City Water South31.2804-103.3227829falsetrue2020-03-04T06:30:00.0000Z31.2804-103.32278292020-03-04T06:30:00.0000Zxxxx1yyyy1100100024489800001m/sGCfalse{"description":"Guralp 6TD S/N 6A93"}FDSNXML:Sensor2020-03-04T06:30:00.0000Zxxxx2yyyy21001090024489800001m/sGCfalse{"description":"Guralp 6TD S/N 6A93"}FDSNXML:Sensor2020-03-04T06:30:00.0000Zxxxx0yyyy0100100-9024489800001m/sGCfalse{"description":"Guralp 6TD S/N 6A93"}FDSNXML:Sensor2020-06-22T23:00:00.0000ZCR 11131.10306-103.37278861falsetrue2020-06-22T23:00:00.0000Z31.10306-103.372788612020-06-22T23:00:00.0000Zxxxx1yyyy1100100024489800001m/sGCfalse{"description":"Guralp CMG 6TD S/N 6A77"}FDSNXML:Sensor2020-06-22T23:00:00.0000Zxxxx2yyyy21001090024489800001m/sGCfalse{"description":"Guralp CMG 6TD S/N 6A77"}FDSNXML:Sensor2020-06-22T23:00:00.0000Zxxxx0yyyy0100100-9024489800001m/sGCfalse{"description":"Guralp CMG 6TD S/N 6A77"}FDSNXML:Sensor2020-07-07T18:00:00.0000ZCR 11131.534929-104.0540451006falsetrue2020-07-07T18:00:00.0000Z31.534929-104.05404510062020-07-07T18:00:00.0000Zxxxx1yyyy1100100024489800001m/sGCfalse{"description":"Guralp CMG 6TD S/N 6V98"}FDSNXML:Sensor2020-07-07T18:00:00.0000Zxxxx2yyyy21001090024489800001m/sGCfalse{"description":"Guralp CMG 6TD S/N 6V98"}FDSNXML:Sensor2020-07-07T18:00:00.0000Zxxxx0yyyy0100100-9024489800001m/sGCfalse{"description":"Guralp CMG 6TD S/N 6V98"}FDSNXML:Sensor2017-02-01T00:00:00.0000ZPecos31.370449-103.866966914falsetrue2017-02-01T00:00:00.0000Z31.370449-103.8669669142017-02-01T00:00:00.0000Zxxxx1yyyy120015.88904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0530, Centaur S/N 1789"}FDSNXML:Sensor2017-02-01T00:00:00.0000Zxxxx2yyyy220015.8004774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0530, Centaur S/N 1789"}FDSNXML:Sensor2017-02-01T00:00:00.0000Zxxxx0yyyy020015.889-904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0530, Centaur S/N 1789"}FDSNXML:Sensor2018-10-25T00:00:00.0000ZTexas Public Hunt System CROSS BAR MGMT. 
AREA35.390862-101.9474331094falsetrue2018-10-25T00:00:00.0000Z35.390862-101.94743310942018-10-25T00:00:00.0000Zxxxx1yyyy1010002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4134 Centaur S/N 1753"}FDSNXML:Sensor2018-10-25T00:00:00.0000Zxxxx2yyyy20109002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4134 Centaur S/N 1753"}FDSNXML:Sensor2018-10-25T00:00:00.0000Zxxxx0yyyy00100-902989930001M/STfalse{"description":"Trillium 20s Compact S/N 4134 Centaur S/N 1753"}FDSNXML:Sensor2018-10-25T00:00:00.0000ZPantex35.357372-101.5513611061falsetrue2018-10-25T00:00:00.0000Z35.357372-101.55136110612018-10-25T00:00:00.0000Zxxxx1yyyy110010002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4133, Centaur S/N 1654"}FDSNXML:Sensor2018-10-25T00:00:00.0000Zxxxx2yyyy2100109002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4133, Centaur S/N 1654"}FDSNXML:Sensor2018-10-25T00:00:00.0000Zxxxx0yyyy0100100-902989930001M/STfalse{"description":"Trillium 20s Compact S/N 4133, Centaur S/N 1654"}FDSNXML:Sensor2017-08-08T00:00:00.0000ZPalo Pinto32.812759-98.315781336falsetrue2017-08-08T00:00:00.0000Z32.812759-98.3157813362017-08-08T00:00:00.0000Zxxxx1yyyy120016.18904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0544, Centaur S/N 1658"}FDSNXML:Sensor2017-08-08T00:00:00.0000Zxxxx2yyyy220016.117904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0544, Centaur S/N 1658"}FDSNXML:Sensor2017-08-08T00:00:00.0000Zxxxx0yyyy020016.189-904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0544, Centaur S/N 1658"}FDSNXML:Sensor2016-12-16T00:00:00.0000Z2026-12-31T23:59:59.0000ZPost33.0662-101.501053795falsetrue2016-12-16T00:00:00.0000Z2026-12-31T23:59:59.0000Z33.0662-101.5010537952016-12-16T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx1yyyy120016.113104774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0523, Centaur S/N 1660"}FDSNXML:Sensor2016-12-16T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx2yyyy220016.122104774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0523, Centaur S/N 1660"}FDSNXML:Sensor2016-12-16T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx0yyyy020016.1131-904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0523, Centaur S/N 1660"}FDSNXML:Sensor2017-08-10T00:00:00.0000ZRita Blanca36.43795-102.7374271284falsetrue2017-08-10T00:00:00.0000Z36.43795-102.73742712842017-08-10T00:00:00.0000Zxxxx1yyyy120016.19404774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0541, Centaur S/N 1635"}FDSNXML:Sensor2017-08-10T00:00:00.0000Zxxxx2yyyy220016.118404774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0541, Centaur S/N 1635"}FDSNXML:Sensor2017-08-10T00:00:00.0000Zxxxx0yyyy020016.194-904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0541, Centaur S/N 1635"}FDSNXML:Sensor2017-02-08T00:00:00.0000Z2026-12-31T23:59:59.0000ZSanderson29.95471-102.123306688.8falsetrue2017-02-08T00:00:00.0000Z2026-12-31T23:59:59.0000Z29.95471-102.123306688.82017-02-08T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx1yyyy120016.123104774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0534, Centaur S/N 1702"}FDSNXML:Sensor2017-02-08T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx2yyyy220016.132104774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0534, Centaur S/N 1702"}FDSNXML:Sensor2017-02-08T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx0yyyy020016.1231-904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0534, Centaur S/N 1702"}FDSNXML:Sensor2017-02-08T00:00:00.0000Z2026-12-31T23:59:59.0000ZSterling 
City31.91436-101.126389776falsetrue2017-02-08T00:00:00.0000Z2026-12-31T23:59:59.0000Z31.91436-101.1263897762017-02-08T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx1yyyy120016.229004774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0540, Centaur S/N 1694"}FDSNXML:Sensor2017-02-08T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx2yyyy220016.22004774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0540, Centaur S/N 1694"}FDSNXML:Sensor2017-02-08T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx0yyyy020016.2290-904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0540, Centaur S/N 1694"}FDSNXML:Sensor2017-08-08T00:00:00.0000ZSamnorwood35.104549-100.242477704falsetrue2017-08-08T00:00:00.0000Z35.104549-100.2424777042017-08-08T00:00:00.0000Zxxxx1yyyy120016.133604774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0518, Centaur S/N 1636"}FDSNXML:Sensor2017-08-08T00:00:00.0000Zxxxx2yyyy220016.16604774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0518, Centaur S/N 1636"}FDSNXML:Sensor2017-08-08T00:00:00.0000Zxxxx0yyyy020016.1336-904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0518, Centaur S/N 1636"}FDSNXML:Sensor2018-02-14T00:00:00.0000Z2026-12-31T23:59:59.0000ZSnyder 232.864521-100.911057748falsetrue2018-02-14T00:00:00.0000Z2026-12-31T23:59:59.0000Z32.864521-100.9110577482018-02-14T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx1yyyy120010002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4141, Centaur S/N 1752"}FDSNXML:Sensor2018-02-14T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx2yyyy2200109002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4141, Centaur S/N 1752"}FDSNXML:Sensor2018-02-14T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx0yyyy0200100-902989930001M/STfalse{"description":"Trillium 20s Compact S/N 4141, Centaur S/N 1752"}FDSNXML:Sensor2018-02-14T00:00:00.0000Z2026-12-31T00:00:00.0000ZSnyder 332.92429-100.941177744falsetrue2018-02-14T00:00:00.0000Z2026-12-31T00:00:00.0000Z32.92429-100.9411777442018-02-14T00:00:00.0000Z2026-12-31T00:00:00.0000Zxxxx1yyyy120010002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4132, Centaur S/N 1629"}FDSNXML:Sensor2018-02-14T00:00:00.0000Z2026-12-31T00:00:00.0000Zxxxx2yyyy2200109002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4132, Centaur S/N 1629"}FDSNXML:Sensor2018-02-14T00:00:00.0000Z2026-12-31T00:00:00.0000Zxxxx0yyyy0200100-902989930001M/STfalse{"description":"Trillium 20s Compact S/N 4132, Centaur S/N 1629"}FDSNXML:Sensor2018-02-14T00:00:00.0000ZSnyder 0733.090919-100.891724620falsetrue2018-02-14T00:00:00.0000Z33.090919-100.8917246202018-02-14T00:00:00.0000Zxxxx1yyyy120010002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4130, Centaur S/N 1782"}FDSNXML:Sensor2018-02-14T00:00:00.0000Zxxxx2yyyy2200109002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4130, Centaur S/N 1782"}FDSNXML:Sensor2018-02-14T00:00:00.0000Zxxxx0yyyy0200100-902989930001M/STfalse{"description":"Trillium 20s Compact S/N 4130, Centaur S/N 1782"}FDSNXML:Sensor2018-11-29T19:22:22.0000ZArah32.782661-101.060707741falsetrue2018-11-29T19:22:22.0000Z32.782661-101.0607077412018-11-29T19:22:22.0000Zxxxx1yyyy120010002989930001M/STfalse{"description":"Centaur S/N 1633, Trillium Compact 20s S/N 4119"}FDSNXML:Sensor2018-11-29T19:22:22.0000Zxxxx2yyyy2200109002989930001M/STfalse{"description":"Centaur S/N 1633, Trillium Compact 20s S/N 4119"}FDSNXML:Sensor2018-11-29T19:22:22.0000Zxxxx0yyyy0200100-902989930001M/STfalse{"description":"Centaur S/N 1633, Trillium Compact 20s S/N 
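The block summarized above is ordinary SeisComP inventory XML that did not survive extraction. As a hedged illustration only — not part of this commit — the following minimal sketch shows one way such an inventory file could be listed with the SeisComP Python API; the file name inventory.xml and the printed fields are illustrative assumptions.

import seiscomp.datamodel
import seiscomp.io

# Hypothetical file name; the actual inventory file of this commit is not recoverable here.
ar = seiscomp.io.XMLArchive()
if not ar.open("inventory.xml"):
    raise SystemExit("could not open inventory.xml")

obj = ar.readObject()
ar.close()

inv = seiscomp.datamodel.Inventory.Cast(obj)
if inv is None:
    raise SystemExit("no inventory object found in file")

# One line per station epoch: network code, station code, coordinates.
# Unset optional attributes raise ValueError in the Python bindings, hence the guard.
for n in range(inv.networkCount()):
    net = inv.network(n)
    for s in range(net.stationCount()):
        sta = net.station(s)
        try:
            print(net.code(), sta.code(), sta.latitude(), sta.longitude(), sta.elevation())
        except ValueError:
            print(net.code(), sta.code())

Run such a script under seiscomp-python so that the seiscomp modules resolve.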
4119"}FDSNXML:Sensor2019-10-01T16:45:00.0000ZHermleigh32.67297-100.73941663falsetrue2019-10-01T16:45:00.0000Z32.67297-100.739416632019-10-01T16:45:00.0000Zxxxx1yyyy120010002989930001M/STfalse{"description":"Trillium 20s Compact S/N 2361, Centaur S/N 1663"}FDSNXML:Sensor2019-10-01T16:45:00.0000Zxxxx2yyyy2200109002989930001M/STfalse{"description":"Trillium 20s Compact S/N 2361, Centaur S/N 1663"}FDSNXML:Sensor2019-10-01T16:45:00.0000Zxxxx0yyyy0200100-902989930001M/STfalse{"description":"Trillium 20s Compact S/N 2361, Centaur S/N 1663"}FDSNXML:Sensor2019-03-21T00:00:00.0000ZRotan33.049179-100.723877658falsetrue2019-03-21T00:00:00.0000Z33.049179-100.7238776582019-03-21T00:00:00.0000Zxxxx1yyyy110010002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4131, Centaur S/N 1711"}FDSNXML:Sensor2019-03-21T00:00:00.0000Zxxxx2yyyy2100109002989930001M/STfalse{"description":"Trillium 20s Compact S/N 4131, Centaur S/N 1711"}FDSNXML:Sensor2019-03-21T00:00:00.0000Zxxxx0yyyy0100100-902989930001M/STfalse{"description":"Trillium 20s Compact S/N 4131, Centaur S/N 1711"}FDSNXML:Sensor2017-08-04T00:00:00.0000ZSan Augustine31.518181-94.17877284falsetrue2017-08-04T00:00:00.0000Z31.518181-94.178772842017-08-04T00:00:00.0000Zxxxx1yyyy120016.115504774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0532, Centaur S/N 1622"}FDSNXML:Sensor2017-08-04T00:00:00.0000Zxxxx2yyyy220016.124504774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0532, Centaur S/N 1622"}FDSNXML:Sensor2017-08-04T00:00:00.0000Zxxxx0yyyy020016.1155-904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0532, Centaur S/N 1622"}FDSNXML:Sensor2017-08-06T00:00:00.0000ZTerrell32.730389-96.089333174falsetrue2017-08-06T00:00:00.0000Z32.730389-96.0893331742017-08-06T00:00:00.0000Zxxxx1yyyy120016.121504774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0528, Centaur S/N 1664"}FDSNXML:Sensor2017-08-06T00:00:00.0000Zxxxx2yyyy220016.19002989930001M/STfalse{"description":"Trillium 120 Posthole S/N 0528, Centaur S/N 1664"}FDSNXML:Sensor2017-08-06T00:00:00.0000Zxxxx0yyyy020016.1215-904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 0528, Centaur S/N 1664"}FDSNXML:Sensor2017-02-01T00:00:00.0000Z2026-12-31T23:59:59.0000ZVan Horn30.7866-104.9851531342falsetrue2017-02-01T00:00:00.0000Z2026-12-31T23:59:59.0000Z30.7866-104.98515313422017-02-01T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx1yyyy120016.516904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 1799, Centaur S/N 0542"}FDSNXML:Sensor2017-02-01T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx2yyyy220016.525904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 1799, Centaur S/N 0542"}FDSNXML:Sensor2017-02-01T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx0yyyy020016.5169-904774130001M/STfalse{"description":"Trillium 120 Posthole S/N 1799, Centaur S/N 0542"}FDSNXML:Sensor2016-12-16T00:00:00.0000Z2026-12-31T23:59:59.0000ZWitchita Falls33.76622-98.464546277falsetrue2016-12-16T00:00:00.0000Z2026-12-31T23:59:59.0000Z33.76622-98.4645462772016-12-16T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx1yyyy120016.16204774130001M/STfalse{"description":"Tillium 120 Posthole S/N 0522, Centaur S/N 1680"}FDSNXML:Sensor2016-12-16T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx2yyyy220016.115204774130001M/STfalse{"description":"Tillium 120 Posthole S/N 0522, Centaur S/N 1680"}FDSNXML:Sensor2016-12-16T00:00:00.0000Z2026-12-31T23:59:59.0000Zxxxx0yyyy020016.162-904774130001M/STfalse{"description":"Tillium 120 Posthole S/N 0522, Centaur S/N 1680"}FDSNXML:Sensor diff --git 
a/etc/kernel.cfg b/etc/kernel.cfg new file mode 100644 index 0000000..e69de29 diff --git a/etc/key/global/profile_00_BHZ b/etc/key/global/profile_00_BHZ new file mode 100644 index 0000000..9fba17a --- /dev/null +++ b/etc/key/global/profile_00_BHZ @@ -0,0 +1,10 @@ +# The sensor location code of the preferred stream used e.g. by scautopick and +# scrttv. +detecLocid = 00 + +# The channel code of the preferred stream used by e.g. scautopick and scrttv. +# If no component code is given, the vertical component will be fetched from +# inventory considering the channel orientation (azimuth, dip). If that +# approach fails, 'Z' will be appended and used as fallback. Do not use regular +# expressions! +detecStream = BHZ diff --git a/etc/key/global/profile_00_HHZ b/etc/key/global/profile_00_HHZ new file mode 100644 index 0000000..54fde2b --- /dev/null +++ b/etc/key/global/profile_00_HHZ @@ -0,0 +1,10 @@ +# The sensor location code of the preferred stream used e.g. by scautopick and +# scrttv. +detecLocid = 00 + +# The channel code of the preferred stream used by e.g. scautopick and scrttv. +# If no component code is given, the vertical component will be fetched from +# inventory considering the channel orientation (azimuth, dip). If that +# approach fails, 'Z' will be appended and used as fallback. Do not use regular +# expressions! +detecStream = HHZ diff --git a/etc/key/global/profile_BHZ b/etc/key/global/profile_BHZ new file mode 100644 index 0000000..65932e7 --- /dev/null +++ b/etc/key/global/profile_BHZ @@ -0,0 +1,6 @@ +# The channel code of the preferred stream used by e.g. scautopick and scrttv. +# If no component code is given, the vertical component will be fetched from +# inventory considering the channel orientation (azimuth, dip). If that +# approach fails, 'Z' will be appended and used as fallback. Do not use regular +# expressions! +detecStream = BHZ diff --git a/etc/key/global/profile_HHZ b/etc/key/global/profile_HHZ new file mode 100644 index 0000000..cf97a50 --- /dev/null +++ b/etc/key/global/profile_HHZ @@ -0,0 +1,6 @@ +# The channel code of the preferred stream used by e.g. scautopick and scrttv. +# If no component code is given, the vertical component will be fetched from +# inventory considering the channel orientation (azimuth, dip). If that +# approach fails, 'Z' will be appended and used as fallback. Do not use regular +# expressions! +detecStream = HHZ diff --git a/etc/key/scautopick/profile_regional b/etc/key/scautopick/profile_regional new file mode 100644 index 0000000..083cae6 --- /dev/null +++ b/etc/key/scautopick/profile_regional @@ -0,0 +1,5 @@ +# Defines the filter to be used for picking. +detecFilter = "RMHP(10)>>ITAPER(30)>>BW(4,3,20)>>STALTA(0.5,10)" + +# The time correction applied to a pick. +timeCorr = 0 diff --git a/etc/key/scautopick/profile_teleseismic b/etc/key/scautopick/profile_teleseismic new file mode 100644 index 0000000..e69de29 diff --git a/etc/key/seedlink/profile_geofon b/etc/key/seedlink/profile_geofon new file mode 100644 index 0000000..acf9152 --- /dev/null +++ b/etc/key/seedlink/profile_geofon @@ -0,0 +1,6 @@ +# Activated plugins for category sources +sources = chain + +# List of stream selectors. If left empty all available streams will be +# requested. See slinktool manpage for more information. 
+sources.chain.selectors = BH?.D,HH?.D diff --git a/etc/key/seedlink/profile_geofon_BH b/etc/key/seedlink/profile_geofon_BH new file mode 100644 index 0000000..5e66c4f --- /dev/null +++ b/etc/key/seedlink/profile_geofon_BH @@ -0,0 +1,6 @@ +# Activated plugins for category sources +sources = chain + +# List of stream selectors. If left empty all available streams will be +# requested. See slinktool manpage for more information. +sources.chain.selectors = BH?.D diff --git a/etc/key/seedlink/profile_iris_00_BH b/etc/key/seedlink/profile_iris_00_BH new file mode 100644 index 0000000..14a6f2f --- /dev/null +++ b/etc/key/seedlink/profile_iris_00_BH @@ -0,0 +1,9 @@ +# Activated plugins for category sources +sources = chain + +# Hostname or IP of the Seedlink server. +sources.chain.address = rtserve.iris.washington.edu + +# List of stream selectors. If left empty all available streams will be +# requested. See slinktool manpage for more information. +sources.chain.selectors = 00BH?.D diff --git a/etc/key/seedlink/profile_iris_00_HH b/etc/key/seedlink/profile_iris_00_HH new file mode 100644 index 0000000..d4512d7 --- /dev/null +++ b/etc/key/seedlink/profile_iris_00_HH @@ -0,0 +1,9 @@ +# Activated plugins for category sources +sources = chain + +# Hostname or IP of the Seedlink server. +sources.chain.address = rtserve.iris.washington.edu + +# List of stream selectors. If left empty all available streams will be +# requested. See slinktool manpage for more information. +sources.chain.selectors = 00HH?.D diff --git a/etc/key/slarchive/profile_7days b/etc/key/slarchive/profile_7days new file mode 100644 index 0000000..2b9291d --- /dev/null +++ b/etc/key/slarchive/profile_7days @@ -0,0 +1,3 @@ +# Number of days the data is kept in the archive. This requires purge_datafile +# to be run as cronjob. 
+keep = 7 diff --git a/etc/key/station_CX_HMBCX b/etc/key/station_CX_HMBCX new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_HMBCX @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_MNMCX b/etc/key/station_CX_MNMCX new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_MNMCX @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PATCX b/etc/key/station_CX_PATCX new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PATCX @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB01 b/etc/key/station_CX_PB01 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB01 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB02 b/etc/key/station_CX_PB02 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB02 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB03 b/etc/key/station_CX_PB03 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB03 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB04 b/etc/key/station_CX_PB04 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB04 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB05 b/etc/key/station_CX_PB05 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB05 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB06 b/etc/key/station_CX_PB06 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB06 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB07 b/etc/key/station_CX_PB07 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB07 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB08 b/etc/key/station_CX_PB08 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB08 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB09 b/etc/key/station_CX_PB09 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB09 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB10 b/etc/key/station_CX_PB10 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB10 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB11 b/etc/key/station_CX_PB11 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB11 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ 
+scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB12 b/etc/key/station_CX_PB12 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB12 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB13 b/etc/key/station_CX_PB13 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB13 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB14 b/etc/key/station_CX_PB14 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB14 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB15 b/etc/key/station_CX_PB15 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB15 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB16 b/etc/key/station_CX_PB16 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB16 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB18 b/etc/key/station_CX_PB18 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB18 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PB19 b/etc/key/station_CX_PB19 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PB19 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PSGCX b/etc/key/station_CX_PSGCX new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PSGCX @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_CX_PX06 b/etc/key/station_CX_PX06 new file mode 100644 index 0000000..cdfee6c --- /dev/null +++ b/etc/key/station_CX_PX06 @@ -0,0 +1,5 @@ +# Binding references +global:HHZ +scautopick:regional +seedlink:geofon +slarchive:7days diff --git a/etc/key/station_GE_ACRG b/etc/key/station_GE_ACRG new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_ACRG @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_APE b/etc/key/station_GE_APE new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_APE @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_ARPR b/etc/key/station_GE_ARPR new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_ARPR @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_BALJ b/etc/key/station_GE_BALJ new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_BALJ @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_BBJI b/etc/key/station_GE_BBJI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ 
b/etc/key/station_GE_BBJI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_BKB b/etc/key/station_GE_BKB new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_BKB @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_BKNI b/etc/key/station_GE_BKNI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_BKNI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_BNDI b/etc/key/station_GE_BNDI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_BNDI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_BOAB b/etc/key/station_GE_BOAB new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_BOAB @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_CSS b/etc/key/station_GE_CSS new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_CSS @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_DAMY b/etc/key/station_GE_DAMY new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_DAMY @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_DSB b/etc/key/station_GE_DSB new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_DSB @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_EIL b/etc/key/station_GE_EIL new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_EIL @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_FAKI b/etc/key/station_GE_FAKI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_FAKI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_FALKS b/etc/key/station_GE_FALKS new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_FALKS @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_FLT1 b/etc/key/station_GE_FLT1 new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_FLT1 @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_GENI b/etc/key/station_GE_GENI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_GENI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_GHAJ b/etc/key/station_GE_GHAJ new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_GHAJ @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git 
a/etc/key/station_GE_GSI b/etc/key/station_GE_GSI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_GSI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_GVD b/etc/key/station_GE_GVD new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_GVD @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_HALK b/etc/key/station_GE_HALK new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_HALK @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_HLG b/etc/key/station_GE_HLG new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_HLG @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_HMDM b/etc/key/station_GE_HMDM new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_HMDM @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_IBBN b/etc/key/station_GE_IBBN new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_IBBN @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_ILTH b/etc/key/station_GE_ILTH new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_ILTH @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_IMMV b/etc/key/station_GE_IMMV new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_IMMV @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_ISP b/etc/key/station_GE_ISP new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_ISP @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_JAGI b/etc/key/station_GE_JAGI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_JAGI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_KAAM b/etc/key/station_GE_KAAM new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_KAAM @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_KARP b/etc/key/station_GE_KARP new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_KARP @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_KBU b/etc/key/station_GE_KBU new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_KBU @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_KERA b/etc/key/station_GE_KERA new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_KERA @@ -0,0 +1,5 @@ +# 
Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_KIBK b/etc/key/station_GE_KIBK new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_KIBK @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_KMBO b/etc/key/station_GE_KMBO new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_KMBO @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_KTHA b/etc/key/station_GE_KTHA new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_KTHA @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_LHMI b/etc/key/station_GE_LHMI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_LHMI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_LODK b/etc/key/station_GE_LODK new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_LODK @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_LUWI b/etc/key/station_GE_LUWI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_LUWI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_LVC b/etc/key/station_GE_LVC new file mode 100644 index 0000000..1c81af2 --- /dev/null +++ b/etc/key/station_GE_LVC @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:regional +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_MALK b/etc/key/station_GE_MALK new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_MALK @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_MARCO b/etc/key/station_GE_MARCO new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_MARCO @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_MATE b/etc/key/station_GE_MATE new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_MATE @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_MMRI b/etc/key/station_GE_MMRI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_MMRI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_MNAI b/etc/key/station_GE_MNAI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_MNAI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_MORC b/etc/key/station_GE_MORC new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_MORC @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_MSBI 
b/etc/key/station_GE_MSBI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_MSBI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_MTE b/etc/key/station_GE_MTE new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_MTE @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_NPW b/etc/key/station_GE_NPW new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_NPW @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_PABE b/etc/key/station_GE_PABE new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_PABE @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_PBUR b/etc/key/station_GE_PBUR new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_PBUR @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_PLAI b/etc/key/station_GE_PLAI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_PLAI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_PMBI b/etc/key/station_GE_PMBI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_PMBI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_PMG b/etc/key/station_GE_PMG new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_PMG @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_PSZ b/etc/key/station_GE_PSZ new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_PSZ @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_PUL b/etc/key/station_GE_PUL new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_PUL @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_RGN b/etc/key/station_GE_RGN new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_RGN @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_RUE b/etc/key/station_GE_RUE new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_RUE @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_SALP b/etc/key/station_GE_SALP new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_SALP @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_SALTA b/etc/key/station_GE_SALTA new file mode 100644 index 0000000..1c81af2 --- /dev/null +++ b/etc/key/station_GE_SALTA @@ -0,0 +1,5 @@ +# Binding references +global:BHZ 
+scautopick:regional +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_SANI b/etc/key/station_GE_SANI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_SANI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_SAUI b/etc/key/station_GE_SAUI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_SAUI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_SBV b/etc/key/station_GE_SBV new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_SBV @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_SFJD b/etc/key/station_GE_SFJD new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_SFJD @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_SLIT b/etc/key/station_GE_SLIT new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_SLIT @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_SMRI b/etc/key/station_GE_SMRI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_SMRI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_SNAA b/etc/key/station_GE_SNAA new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_SNAA @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_SOCY b/etc/key/station_GE_SOCY new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_SOCY @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_SOEI b/etc/key/station_GE_SOEI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_SOEI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_STU b/etc/key/station_GE_STU new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_STU @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_THERA b/etc/key/station_GE_THERA new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_THERA @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_TIRR b/etc/key/station_GE_TIRR new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_TIRR @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_TNTI b/etc/key/station_GE_TNTI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_TNTI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_TOLI2 b/etc/key/station_GE_TOLI2 new file mode 100644 index 
0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_TOLI2 @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_UGM b/etc/key/station_GE_UGM new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_UGM @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_UJAP b/etc/key/station_GE_UJAP new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_UJAP @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_VAL b/etc/key/station_GE_VAL new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_VAL @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_VOI b/etc/key/station_GE_VOI new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_VOI @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_VSU b/etc/key/station_GE_VSU new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_VSU @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_WIN b/etc/key/station_GE_WIN new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_WIN @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_WLF b/etc/key/station_GE_WLF new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_WLF @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_GE_ZKR b/etc/key/station_GE_ZKR new file mode 100644 index 0000000..5998fbd --- /dev/null +++ b/etc/key/station_GE_ZKR @@ -0,0 +1,5 @@ +# Binding references +global:BHZ +scautopick:teleseismic +seedlink:geofon_BH +slarchive:7days diff --git a/etc/key/station_II_AAK b/etc/key/station_II_AAK new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_AAK @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_ABPO b/etc/key/station_II_ABPO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_ABPO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_ALE b/etc/key/station_II_ALE new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_ALE @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_ARTI b/etc/key/station_II_ARTI new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_ARTI @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_ASCN b/etc/key/station_II_ASCN new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_ASCN @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH 
+slarchive:7days diff --git a/etc/key/station_II_BFO b/etc/key/station_II_BFO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_BFO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_BORG b/etc/key/station_II_BORG new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_BORG @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_BORK b/etc/key/station_II_BORK new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_BORK @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_CMLA b/etc/key/station_II_CMLA new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_CMLA @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_COCO b/etc/key/station_II_COCO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_COCO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_DGAR b/etc/key/station_II_DGAR new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_DGAR @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_EFI b/etc/key/station_II_EFI new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_EFI @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_ERM b/etc/key/station_II_ERM new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_ERM @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_ESK b/etc/key/station_II_ESK new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_ESK @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_FFC b/etc/key/station_II_FFC new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_FFC @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_HOPE b/etc/key/station_II_HOPE new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_HOPE @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_IBFO b/etc/key/station_II_IBFO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_IBFO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_JTS b/etc/key/station_II_JTS new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_JTS @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_JZAX b/etc/key/station_II_JZAX new file mode 100644 index 
0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_JZAX @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_KAPI b/etc/key/station_II_KAPI new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_KAPI @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_KDAK b/etc/key/station_II_KDAK new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_KDAK @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_KIV b/etc/key/station_II_KIV new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_KIV @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_KURK b/etc/key/station_II_KURK new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_KURK @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_KWJN b/etc/key/station_II_KWJN new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_KWJN @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_LVZ b/etc/key/station_II_LVZ new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_LVZ @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_MBAR b/etc/key/station_II_MBAR new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_MBAR @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_MSEY b/etc/key/station_II_MSEY new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_MSEY @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_MSVF b/etc/key/station_II_MSVF new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_MSVF @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_NIL b/etc/key/station_II_NIL new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_NIL @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_NNA b/etc/key/station_II_NNA new file mode 100644 index 0000000..474977b --- /dev/null +++ b/etc/key/station_II_NNA @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:regional +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_OBN b/etc/key/station_II_OBN new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_OBN @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_PALK b/etc/key/station_II_PALK new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_PALK @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ 
+scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_PFO b/etc/key/station_II_PFO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_PFO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_RAYN b/etc/key/station_II_RAYN new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_RAYN @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_RPN b/etc/key/station_II_RPN new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_RPN @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_SACV b/etc/key/station_II_SACV new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_SACV @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_SHEL b/etc/key/station_II_SHEL new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_SHEL @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_SIMI b/etc/key/station_II_SIMI new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_SIMI @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_SUR b/etc/key/station_II_SUR new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_SUR @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_TAU b/etc/key/station_II_TAU new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_TAU @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_TLY b/etc/key/station_II_TLY new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_TLY @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_UOSS b/etc/key/station_II_UOSS new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_UOSS @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_WRAB b/etc/key/station_II_WRAB new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_WRAB @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_II_XPFO b/etc/key/station_II_XPFO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_II_XPFO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_ADK b/etc/key/station_IU_ADK new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_ADK @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_AFI 
b/etc/key/station_IU_AFI new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_AFI @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_ANMO b/etc/key/station_IU_ANMO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_ANMO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_ANTO b/etc/key/station_IU_ANTO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_ANTO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_BBSR b/etc/key/station_IU_BBSR new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_BBSR @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_BILL b/etc/key/station_IU_BILL new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_BILL @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_CASY b/etc/key/station_IU_CASY new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_CASY @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_CCM b/etc/key/station_IU_CCM new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_CCM @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_CHTO b/etc/key/station_IU_CHTO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_CHTO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_COLA b/etc/key/station_IU_COLA new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_COLA @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_COR b/etc/key/station_IU_COR new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_COR @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_CTAO b/etc/key/station_IU_CTAO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_CTAO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_DAV b/etc/key/station_IU_DAV new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_DAV @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_DWPF b/etc/key/station_IU_DWPF new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_DWPF @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_FUNA b/etc/key/station_IU_FUNA new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ 
b/etc/key/station_IU_FUNA @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_FURI b/etc/key/station_IU_FURI new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_FURI @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_GNI b/etc/key/station_IU_GNI new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_GNI @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_GRFO b/etc/key/station_IU_GRFO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_GRFO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_GUMO b/etc/key/station_IU_GUMO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_GUMO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_HKT b/etc/key/station_IU_HKT new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_HKT @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_HNR b/etc/key/station_IU_HNR new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_HNR @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_HRV b/etc/key/station_IU_HRV new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_HRV @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_INCN b/etc/key/station_IU_INCN new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_INCN @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_JOHN b/etc/key/station_IU_JOHN new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_JOHN @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_KBS b/etc/key/station_IU_KBS new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_KBS @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_KEV b/etc/key/station_IU_KEV new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_KEV @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_KIEV b/etc/key/station_IU_KIEV new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_KIEV @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_KIP b/etc/key/station_IU_KIP new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_KIP @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic 
+seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_KMBO b/etc/key/station_IU_KMBO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_KMBO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_KNTN b/etc/key/station_IU_KNTN new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_KNTN @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_KONO b/etc/key/station_IU_KONO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_KONO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_LCO b/etc/key/station_IU_LCO new file mode 100644 index 0000000..474977b --- /dev/null +++ b/etc/key/station_IU_LCO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:regional +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_LSZ b/etc/key/station_IU_LSZ new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_LSZ @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_MA2 b/etc/key/station_IU_MA2 new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_MA2 @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_MACI b/etc/key/station_IU_MACI new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_MACI @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_MAJO b/etc/key/station_IU_MAJO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_MAJO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_MAKZ b/etc/key/station_IU_MAKZ new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_MAKZ @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_MBWA b/etc/key/station_IU_MBWA new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_MBWA @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_MIDW b/etc/key/station_IU_MIDW new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_MIDW @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_NWAO b/etc/key/station_IU_NWAO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_NWAO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_OTAV b/etc/key/station_IU_OTAV new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_OTAV @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_PAB b/etc/key/station_IU_PAB new file 
mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_PAB @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_PAYG b/etc/key/station_IU_PAYG new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_PAYG @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_PET b/etc/key/station_IU_PET new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_PET @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_PMG b/etc/key/station_IU_PMG new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_PMG @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_PMSA b/etc/key/station_IU_PMSA new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_PMSA @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_POHA b/etc/key/station_IU_POHA new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_POHA @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_PTCN b/etc/key/station_IU_PTCN new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_PTCN @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_QSPA b/etc/key/station_IU_QSPA new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_QSPA @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_RAO b/etc/key/station_IU_RAO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_RAO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_RAR b/etc/key/station_IU_RAR new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_RAR @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_RCBR b/etc/key/station_IU_RCBR new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_RCBR @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_RSSD b/etc/key/station_IU_RSSD new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_RSSD @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_SAML b/etc/key/station_IU_SAML new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_SAML @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_SBA b/etc/key/station_IU_SBA new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_SBA @@ -0,0 +1,5 @@ +# Binding 
references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_SDV b/etc/key/station_IU_SDV new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_SDV @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_SFJD b/etc/key/station_IU_SFJD new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_SFJD @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_SJG b/etc/key/station_IU_SJG new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_SJG @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_SLBS b/etc/key/station_IU_SLBS new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_SLBS @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_SNZO b/etc/key/station_IU_SNZO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_SNZO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_SSPA b/etc/key/station_IU_SSPA new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_SSPA @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_TARA b/etc/key/station_IU_TARA new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_TARA @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_TATO b/etc/key/station_IU_TATO new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_TATO @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_TEIG b/etc/key/station_IU_TEIG new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_TEIG @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_TIXI b/etc/key/station_IU_TIXI new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_TIXI @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_TRIS b/etc/key/station_IU_TRIS new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_TRIS @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_TRQA b/etc/key/station_IU_TRQA new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_TRQA @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_TSUM b/etc/key/station_IU_TSUM new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_TSUM @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git 
a/etc/key/station_IU_TUC b/etc/key/station_IU_TUC new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_TUC @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_ULN b/etc/key/station_IU_ULN new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_ULN @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_WAKE b/etc/key/station_IU_WAKE new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_WAKE @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_WCI b/etc/key/station_IU_WCI new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_WCI @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_WVT b/etc/key/station_IU_WVT new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_WVT @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_XMAS b/etc/key/station_IU_XMAS new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_XMAS @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_YAK b/etc/key/station_IU_YAK new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_YAK @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_IU_YSS b/etc/key/station_IU_YSS new file mode 100644 index 0000000..bcb8098 --- /dev/null +++ b/etc/key/station_IU_YSS @@ -0,0 +1,5 @@ +# Binding references +global:00_BHZ +scautopick:teleseismic +seedlink:iris_00_BH +slarchive:7days diff --git a/etc/key/station_TX_ALPN b/etc/key/station_TX_ALPN new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_ALPN @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_APMT b/etc/key/station_TX_APMT new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_APMT @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_BRDY b/etc/key/station_TX_BRDY new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_BRDY @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_CRHG b/etc/key/station_TX_CRHG new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_CRHG @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_DKNS b/etc/key/station_TX_DKNS new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_DKNS @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_DRIO b/etc/key/station_TX_DRIO new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ 
b/etc/key/station_TX_DRIO @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_DRZT b/etc/key/station_TX_DRZT new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_DRZT @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_EF02 b/etc/key/station_TX_EF02 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_EF02 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_EF03 b/etc/key/station_TX_EF03 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_EF03 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_EF04 b/etc/key/station_TX_EF04 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_EF04 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_EF05 b/etc/key/station_TX_EF05 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_EF05 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_ET01 b/etc/key/station_TX_ET01 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_ET01 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW01 b/etc/key/station_TX_FW01 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW01 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW02 b/etc/key/station_TX_FW02 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW02 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW03 b/etc/key/station_TX_FW03 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW03 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW04 b/etc/key/station_TX_FW04 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW04 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW05 b/etc/key/station_TX_FW05 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW05 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW06 b/etc/key/station_TX_FW06 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW06 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW07 b/etc/key/station_TX_FW07 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW07 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH 
+slarchive:7days diff --git a/etc/key/station_TX_FW09 b/etc/key/station_TX_FW09 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW09 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW11 b/etc/key/station_TX_FW11 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW11 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW12 b/etc/key/station_TX_FW12 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW12 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW13 b/etc/key/station_TX_FW13 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW13 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW14 b/etc/key/station_TX_FW14 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW14 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW15 b/etc/key/station_TX_FW15 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW15 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_FW16 b/etc/key/station_TX_FW16 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_FW16 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_HBVL b/etc/key/station_TX_HBVL new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_HBVL @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_HNDO b/etc/key/station_TX_HNDO new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_HNDO @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_HNVL b/etc/key/station_TX_HNVL new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_HNVL @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_MB01 b/etc/key/station_TX_MB01 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_MB01 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_MB02 b/etc/key/station_TX_MB02 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_MB02 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_MB04 b/etc/key/station_TX_MB04 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_MB04 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_MB05 b/etc/key/station_TX_MB05 new file mode 100644 index 0000000..d6ca206 --- 
/dev/null +++ b/etc/key/station_TX_MB05 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_MB06 b/etc/key/station_TX_MB06 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_MB06 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_MB07 b/etc/key/station_TX_MB07 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_MB07 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_MB09 b/etc/key/station_TX_MB09 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_MB09 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_MLDN b/etc/key/station_TX_MLDN new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_MLDN @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_MNHN b/etc/key/station_TX_MNHN new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_MNHN @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_ODSA b/etc/key/station_TX_ODSA new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_ODSA @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_OZNA b/etc/key/station_TX_OZNA new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_OZNA @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB01 b/etc/key/station_TX_PB01 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB01 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB02 b/etc/key/station_TX_PB02 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB02 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB03 b/etc/key/station_TX_PB03 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB03 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB04 b/etc/key/station_TX_PB04 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB04 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB05 b/etc/key/station_TX_PB05 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB05 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB06 b/etc/key/station_TX_PB06 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB06 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH 
+slarchive:7days diff --git a/etc/key/station_TX_PB07 b/etc/key/station_TX_PB07 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB07 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB08 b/etc/key/station_TX_PB08 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB08 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB09 b/etc/key/station_TX_PB09 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB09 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB10 b/etc/key/station_TX_PB10 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB10 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB11 b/etc/key/station_TX_PB11 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB11 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB12 b/etc/key/station_TX_PB12 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB12 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB13 b/etc/key/station_TX_PB13 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB13 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB14 b/etc/key/station_TX_PB14 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB14 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB15 b/etc/key/station_TX_PB15 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB15 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB16 b/etc/key/station_TX_PB16 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB16 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB17 b/etc/key/station_TX_PB17 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB17 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB18 b/etc/key/station_TX_PB18 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB18 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB19 b/etc/key/station_TX_PB19 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB19 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB21 b/etc/key/station_TX_PB21 new file mode 100644 index 0000000..d6ca206 --- 
/dev/null +++ b/etc/key/station_TX_PB21 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB22 b/etc/key/station_TX_PB22 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB22 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB28 b/etc/key/station_TX_PB28 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB28 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB29 b/etc/key/station_TX_PB29 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB29 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB30 b/etc/key/station_TX_PB30 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB30 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB32 b/etc/key/station_TX_PB32 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB32 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PB33 b/etc/key/station_TX_PB33 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PB33 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PECS b/etc/key/station_TX_PECS new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PECS @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PH02 b/etc/key/station_TX_PH02 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PH02 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PH03 b/etc/key/station_TX_PH03 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PH03 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_PLPT b/etc/key/station_TX_PLPT new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_PLPT @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_POST b/etc/key/station_TX_POST new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_POST @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_RTBA b/etc/key/station_TX_RTBA new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_RTBA @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_SAND b/etc/key/station_TX_SAND new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_SAND @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH 
+slarchive:7days diff --git a/etc/key/station_TX_SGCY b/etc/key/station_TX_SGCY new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_SGCY @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_SMWD b/etc/key/station_TX_SMWD new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_SMWD @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_SN02 b/etc/key/station_TX_SN02 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_SN02 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_SN03 b/etc/key/station_TX_SN03 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_SN03 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_SN07 b/etc/key/station_TX_SN07 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_SN07 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_SN08 b/etc/key/station_TX_SN08 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_SN08 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_SN09 b/etc/key/station_TX_SN09 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_SN09 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_SN10 b/etc/key/station_TX_SN10 new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_SN10 @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_SNAG b/etc/key/station_TX_SNAG new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_SNAG @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_TREL b/etc/key/station_TX_TREL new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_TREL @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_VHRN b/etc/key/station_TX_VHRN new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_VHRN @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/key/station_TX_WTFS b/etc/key/station_TX_WTFS new file mode 100644 index 0000000..d6ca206 --- /dev/null +++ b/etc/key/station_TX_WTFS @@ -0,0 +1,5 @@ +# Binding references +global:00_HHZ +scautopick:regional +seedlink:iris_00_HH +slarchive:7days diff --git a/etc/scautopick.cfg b/etc/scautopick.cfg new file mode 100644 index 0000000..7558506 --- /dev/null +++ b/etc/scautopick.cfg @@ -0,0 +1,20 @@ +# The re-picker to use. By default only simple detections are emitted as picks. +# To enable re-picking on a time window around the detection, an algorithm +# (plugin) can be defined with this parameter. 
+# Currently available: "AIC", "BK" or "GFZ". +# More options may be available by plugins. Configure related parameters in +# global bindings. +picker = "AIC" + +# The secondary picker to use, e.g., for picking S-phases. Currently available +# is: "S-L2". More options may be available by plugins. Configure related +# parameters in global bindings. +spicker = "S-L2" + +# If enabled, all secondary pickers that were triggered by a previous pick will +# be terminated when a new detection or pick has been found. This aims to avoid +# the case where an S phase is wrongly picked as P but would also be picked as +# S by the secondary picker. But suppressing the S pick can lead to undesired +# results. It might be better in some situations to have two picks (P and S) +# instead only a wrong P. +killPendingSPickers = false diff --git a/etc/scmaster.cfg b/etc/scmaster.cfg new file mode 100644 index 0000000..156a4fa --- /dev/null +++ b/etc/scmaster.cfg @@ -0,0 +1,26 @@ +# Default plugins to load. Application specific configuration +# files should use the 'plugins' entry to specify additional +# plugins otherwise when using 'core.plugins' also these +# default values are going to be overwritten. +# +# To be able to read from all supported databases all available +# database plugins are loaded as 'core'. +# All currently supported db backends: dbmysql, dbpostgresql, dbsqlite3 +core.plugins = dbmysql + +# The plugins loaded and executed for this particular queue. +# The execution order is exactly the same order as given here. +queues.production.plugins = dbstore + +queues.production.processors.messages = dbstore + +# Select the database driver. This all depends on the +# loaded plugins. +queues.production.processors.messages.dbstore.driver = mysql + +# Defines the read connection sent to the clients +queues.production.processors.messages.dbstore.read = sysop:sysop@localhost/seiscomp + +# Defines the write connection for the plugin. This line +# will not be published and only be used internally. +queues.production.processors.messages.dbstore.write = sysop:sysop@localhost/seiscomp diff --git a/etc/scvoice.cfg b/etc/scvoice.cfg new file mode 100644 index 0000000..eedf786 --- /dev/null +++ b/etc/scvoice.cfg @@ -0,0 +1,9 @@ +# Specify the script to be called if a preliminary origin arrives, latitude and +# longitude are passed as parameters $1 and $2. +scripts.alert = @DATADIR@/scvoice/alert.sh + +# Specify the script to be called when an event has been declared; the message +# string, a flag (1=new event, 0=update event), the EventID, the arrival count +# and the magnitude (optional when set) are passed as parameter $1, $2, $3, $4 +# and $5. +scripts.event = @DATADIR@/scvoice/event.sh diff --git a/etc/slarchive.cfg b/etc/slarchive.cfg new file mode 100644 index 0000000..e750533 --- /dev/null +++ b/etc/slarchive.cfg @@ -0,0 +1,3 @@ +# Path to waveform archive where all data is stored. Relative paths (as the +# default) are treated relative to the installation directory ($SEISCOMP_ROOT). +archive = /home/data/archive diff --git a/include/libbson-1.0/bson.h b/include/libbson-1.0/bson.h new file mode 100644 index 0000000..cfe6d06 --- /dev/null +++ b/include/libbson-1.0/bson.h @@ -0,0 +1,18 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Including bson.h is superseded. Use bson/bson.h instead. */ +#include "bson/bson.h" \ No newline at end of file diff --git a/include/libbson-1.0/bson/bcon.h b/include/libbson-1.0/bson/bcon.h new file mode 100644 index 0000000..8dea1fa --- /dev/null +++ b/include/libbson-1.0/bson/bcon.h @@ -0,0 +1,295 @@ +/* + * @file bcon.h + * @brief BCON (BSON C Object Notation) Declarations + */ + +#include "bson/bson-prelude.h" + +/* Copyright 2009-2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef BCON_H_ +#define BCON_H_ + +#include "bson/bson.h" + + +BSON_BEGIN_DECLS + + +#define BCON_STACK_MAX 100 + +#define BCON_ENSURE_DECLARE(fun, type) \ + static BSON_INLINE type bcon_ensure_##fun (type _t) \ + { \ + return _t; \ + } + +#define BCON_ENSURE(fun, val) bcon_ensure_##fun (val) + +#define BCON_ENSURE_STORAGE(fun, val) bcon_ensure_##fun (&(val)) + +BCON_ENSURE_DECLARE (const_char_ptr, const char *) +BCON_ENSURE_DECLARE (const_char_ptr_ptr, const char **) +BCON_ENSURE_DECLARE (double, double) +BCON_ENSURE_DECLARE (double_ptr, double *) +BCON_ENSURE_DECLARE (const_bson_ptr, const bson_t *) +BCON_ENSURE_DECLARE (bson_ptr, bson_t *) +BCON_ENSURE_DECLARE (subtype, bson_subtype_t) +BCON_ENSURE_DECLARE (subtype_ptr, bson_subtype_t *) +BCON_ENSURE_DECLARE (const_uint8_ptr, const uint8_t *) +BCON_ENSURE_DECLARE (const_uint8_ptr_ptr, const uint8_t **) +BCON_ENSURE_DECLARE (uint32, uint32_t) +BCON_ENSURE_DECLARE (uint32_ptr, uint32_t *) +BCON_ENSURE_DECLARE (const_oid_ptr, const bson_oid_t *) +BCON_ENSURE_DECLARE (const_oid_ptr_ptr, const bson_oid_t **) +BCON_ENSURE_DECLARE (int32, int32_t) +BCON_ENSURE_DECLARE (int32_ptr, int32_t *) +BCON_ENSURE_DECLARE (int64, int64_t) +BCON_ENSURE_DECLARE (int64_ptr, int64_t *) +BCON_ENSURE_DECLARE (const_decimal128_ptr, const bson_decimal128_t *) +BCON_ENSURE_DECLARE (bool, bool) +BCON_ENSURE_DECLARE (bool_ptr, bool *) +BCON_ENSURE_DECLARE (bson_type, bson_type_t) +BCON_ENSURE_DECLARE (bson_iter_ptr, bson_iter_t *) +BCON_ENSURE_DECLARE (const_bson_iter_ptr, const bson_iter_t *) + +#define BCON_UTF8(_val) \ + BCON_MAGIC, BCON_TYPE_UTF8, BCON_ENSURE (const_char_ptr, (_val)) +#define BCON_DOUBLE(_val) \ + BCON_MAGIC, BCON_TYPE_DOUBLE, BCON_ENSURE (double, (_val)) +#define BCON_DOCUMENT(_val) \ + BCON_MAGIC, BCON_TYPE_DOCUMENT, BCON_ENSURE (const_bson_ptr, (_val)) +#define BCON_ARRAY(_val) \ + BCON_MAGIC, BCON_TYPE_ARRAY, BCON_ENSURE (const_bson_ptr, (_val)) +#define BCON_BIN(_subtype, _binary, _length) \ + BCON_MAGIC, BCON_TYPE_BIN, BCON_ENSURE (subtype, (_subtype)), \ + BCON_ENSURE (const_uint8_ptr, 
(_binary)), \ + BCON_ENSURE (uint32, (_length)) +#define BCON_UNDEFINED BCON_MAGIC, BCON_TYPE_UNDEFINED +#define BCON_OID(_val) \ + BCON_MAGIC, BCON_TYPE_OID, BCON_ENSURE (const_oid_ptr, (_val)) +#define BCON_BOOL(_val) BCON_MAGIC, BCON_TYPE_BOOL, BCON_ENSURE (bool, (_val)) +#define BCON_DATE_TIME(_val) \ + BCON_MAGIC, BCON_TYPE_DATE_TIME, BCON_ENSURE (int64, (_val)) +#define BCON_NULL BCON_MAGIC, BCON_TYPE_NULL +#define BCON_REGEX(_regex, _flags) \ + BCON_MAGIC, BCON_TYPE_REGEX, BCON_ENSURE (const_char_ptr, (_regex)), \ + BCON_ENSURE (const_char_ptr, (_flags)) +#define BCON_DBPOINTER(_collection, _oid) \ + BCON_MAGIC, BCON_TYPE_DBPOINTER, \ + BCON_ENSURE (const_char_ptr, (_collection)), \ + BCON_ENSURE (const_oid_ptr, (_oid)) +#define BCON_CODE(_val) \ + BCON_MAGIC, BCON_TYPE_CODE, BCON_ENSURE (const_char_ptr, (_val)) +#define BCON_SYMBOL(_val) \ + BCON_MAGIC, BCON_TYPE_SYMBOL, BCON_ENSURE (const_char_ptr, (_val)) +#define BCON_CODEWSCOPE(_js, _scope) \ + BCON_MAGIC, BCON_TYPE_CODEWSCOPE, BCON_ENSURE (const_char_ptr, (_js)), \ + BCON_ENSURE (const_bson_ptr, (_scope)) +#define BCON_INT32(_val) \ + BCON_MAGIC, BCON_TYPE_INT32, BCON_ENSURE (int32, (_val)) +#define BCON_TIMESTAMP(_timestamp, _increment) \ + BCON_MAGIC, BCON_TYPE_TIMESTAMP, BCON_ENSURE (int32, (_timestamp)), \ + BCON_ENSURE (int32, (_increment)) +#define BCON_INT64(_val) \ + BCON_MAGIC, BCON_TYPE_INT64, BCON_ENSURE (int64, (_val)) +#define BCON_DECIMAL128(_val) \ + BCON_MAGIC, BCON_TYPE_DECIMAL128, BCON_ENSURE (const_decimal128_ptr, (_val)) +#define BCON_MAXKEY BCON_MAGIC, BCON_TYPE_MAXKEY +#define BCON_MINKEY BCON_MAGIC, BCON_TYPE_MINKEY +#define BCON(_val) \ + BCON_MAGIC, BCON_TYPE_BCON, BCON_ENSURE (const_bson_ptr, (_val)) +#define BCON_ITER(_val) \ + BCON_MAGIC, BCON_TYPE_ITER, BCON_ENSURE (const_bson_iter_ptr, (_val)) + +#define BCONE_UTF8(_val) \ + BCONE_MAGIC, BCON_TYPE_UTF8, BCON_ENSURE_STORAGE (const_char_ptr_ptr, (_val)) +#define BCONE_DOUBLE(_val) \ + BCONE_MAGIC, BCON_TYPE_DOUBLE, BCON_ENSURE_STORAGE (double_ptr, (_val)) +#define BCONE_DOCUMENT(_val) \ + BCONE_MAGIC, BCON_TYPE_DOCUMENT, BCON_ENSURE_STORAGE (bson_ptr, (_val)) +#define BCONE_ARRAY(_val) \ + BCONE_MAGIC, BCON_TYPE_ARRAY, BCON_ENSURE_STORAGE (bson_ptr, (_val)) +#define BCONE_BIN(subtype, binary, length) \ + BCONE_MAGIC, BCON_TYPE_BIN, BCON_ENSURE_STORAGE (subtype_ptr, (subtype)), \ + BCON_ENSURE_STORAGE (const_uint8_ptr_ptr, (binary)), \ + BCON_ENSURE_STORAGE (uint32_ptr, (length)) +#define BCONE_UNDEFINED BCONE_MAGIC, BCON_TYPE_UNDEFINED +#define BCONE_OID(_val) \ + BCONE_MAGIC, BCON_TYPE_OID, BCON_ENSURE_STORAGE (const_oid_ptr_ptr, (_val)) +#define BCONE_BOOL(_val) \ + BCONE_MAGIC, BCON_TYPE_BOOL, BCON_ENSURE_STORAGE (bool_ptr, (_val)) +#define BCONE_DATE_TIME(_val) \ + BCONE_MAGIC, BCON_TYPE_DATE_TIME, BCON_ENSURE_STORAGE (int64_ptr, (_val)) +#define BCONE_NULL BCONE_MAGIC, BCON_TYPE_NULL +#define BCONE_REGEX(_regex, _flags) \ + BCONE_MAGIC, BCON_TYPE_REGEX, \ + BCON_ENSURE_STORAGE (const_char_ptr_ptr, (_regex)), \ + BCON_ENSURE_STORAGE (const_char_ptr_ptr, (_flags)) +#define BCONE_DBPOINTER(_collection, _oid) \ + BCONE_MAGIC, BCON_TYPE_DBPOINTER, \ + BCON_ENSURE_STORAGE (const_char_ptr_ptr, (_collection)), \ + BCON_ENSURE_STORAGE (const_oid_ptr_ptr, (_oid)) +#define BCONE_CODE(_val) \ + BCONE_MAGIC, BCON_TYPE_CODE, BCON_ENSURE_STORAGE (const_char_ptr_ptr, (_val)) +#define BCONE_SYMBOL(_val) \ + BCONE_MAGIC, BCON_TYPE_SYMBOL, \ + BCON_ENSURE_STORAGE (const_char_ptr_ptr, (_val)) +#define BCONE_CODEWSCOPE(_js, _scope) \ + BCONE_MAGIC, 
BCON_TYPE_CODEWSCOPE, \ + BCON_ENSURE_STORAGE (const_char_ptr_ptr, (_js)), \ + BCON_ENSURE_STORAGE (bson_ptr, (_scope)) +#define BCONE_INT32(_val) \ + BCONE_MAGIC, BCON_TYPE_INT32, BCON_ENSURE_STORAGE (int32_ptr, (_val)) +#define BCONE_TIMESTAMP(_timestamp, _increment) \ + BCONE_MAGIC, BCON_TYPE_TIMESTAMP, \ + BCON_ENSURE_STORAGE (int32_ptr, (_timestamp)), \ + BCON_ENSURE_STORAGE (int32_ptr, (_increment)) +#define BCONE_INT64(_val) \ + BCONE_MAGIC, BCON_TYPE_INT64, BCON_ENSURE_STORAGE (int64_ptr, (_val)) +#define BCONE_DECIMAL128(_val) \ + BCONE_MAGIC, BCON_TYPE_DECIMAL128, \ + BCON_ENSURE_STORAGE (const_decimal128_ptr, (_val)) +#define BCONE_MAXKEY BCONE_MAGIC, BCON_TYPE_MAXKEY +#define BCONE_MINKEY BCONE_MAGIC, BCON_TYPE_MINKEY +#define BCONE_SKIP(_val) \ + BCONE_MAGIC, BCON_TYPE_SKIP, BCON_ENSURE (bson_type, (_val)) +#define BCONE_ITER(_val) \ + BCONE_MAGIC, BCON_TYPE_ITER, BCON_ENSURE_STORAGE (bson_iter_ptr, (_val)) + +#define BCON_MAGIC bson_bcon_magic () +#define BCONE_MAGIC bson_bcone_magic () + +typedef enum { + BCON_TYPE_UTF8, + BCON_TYPE_DOUBLE, + BCON_TYPE_DOCUMENT, + BCON_TYPE_ARRAY, + BCON_TYPE_BIN, + BCON_TYPE_UNDEFINED, + BCON_TYPE_OID, + BCON_TYPE_BOOL, + BCON_TYPE_DATE_TIME, + BCON_TYPE_NULL, + BCON_TYPE_REGEX, + BCON_TYPE_DBPOINTER, + BCON_TYPE_CODE, + BCON_TYPE_SYMBOL, + BCON_TYPE_CODEWSCOPE, + BCON_TYPE_INT32, + BCON_TYPE_TIMESTAMP, + BCON_TYPE_INT64, + BCON_TYPE_DECIMAL128, + BCON_TYPE_MAXKEY, + BCON_TYPE_MINKEY, + BCON_TYPE_BCON, + BCON_TYPE_ARRAY_START, + BCON_TYPE_ARRAY_END, + BCON_TYPE_DOC_START, + BCON_TYPE_DOC_END, + BCON_TYPE_END, + BCON_TYPE_RAW, + BCON_TYPE_SKIP, + BCON_TYPE_ITER, + BCON_TYPE_ERROR, +} bcon_type_t; + +typedef struct bcon_append_ctx_frame { + int i; + bool is_array; + bson_t bson; +} bcon_append_ctx_frame_t; + +typedef struct bcon_extract_ctx_frame { + int i; + bool is_array; + bson_iter_t iter; +} bcon_extract_ctx_frame_t; + +typedef struct _bcon_append_ctx_t { + bcon_append_ctx_frame_t stack[BCON_STACK_MAX]; + int n; +} bcon_append_ctx_t; + +typedef struct _bcon_extract_ctx_t { + bcon_extract_ctx_frame_t stack[BCON_STACK_MAX]; + int n; +} bcon_extract_ctx_t; + +BSON_EXPORT (void) +bcon_append (bson_t *bson, ...) BSON_GNUC_NULL_TERMINATED; +BSON_EXPORT (void) +bcon_append_ctx (bson_t *bson, + bcon_append_ctx_t *ctx, + ...) BSON_GNUC_NULL_TERMINATED; +BSON_EXPORT (void) +bcon_append_ctx_va (bson_t *bson, bcon_append_ctx_t *ctx, va_list *va); +BSON_EXPORT (void) +bcon_append_ctx_init (bcon_append_ctx_t *ctx); + +BSON_EXPORT (void) +bcon_extract_ctx_init (bcon_extract_ctx_t *ctx); + +BSON_EXPORT (void) +bcon_extract_ctx (bson_t *bson, + bcon_extract_ctx_t *ctx, + ...) BSON_GNUC_NULL_TERMINATED; + +BSON_EXPORT (bool) +bcon_extract_ctx_va (bson_t *bson, bcon_extract_ctx_t *ctx, va_list *ap); + +BSON_EXPORT (bool) +bcon_extract (bson_t *bson, ...) BSON_GNUC_NULL_TERMINATED; + +BSON_EXPORT (bool) +bcon_extract_va (bson_t *bson, + bcon_extract_ctx_t *ctx, + ...) BSON_GNUC_NULL_TERMINATED; + +BSON_EXPORT (bson_t *) +bcon_new (void *unused, ...) BSON_GNUC_NULL_TERMINATED; + +/** + * The bcon_..() functions are all declared with __attribute__((sentinel)). + * + * From GCC manual for "sentinel": "A valid NULL in this context is defined as + * zero with any pointer type. If your system defines the NULL macro with an + * integer type then you need to add an explicit cast." + * Case in point: GCC on Solaris (at least) + */ +#define BCON_APPEND(_bson, ...) \ + bcon_append ((_bson), __VA_ARGS__, (void *) NULL) +#define BCON_APPEND_CTX(_bson, _ctx, ...) 
\ + bcon_append_ctx ((_bson), (_ctx), __VA_ARGS__, (void *) NULL) + +#define BCON_EXTRACT(_bson, ...) \ + bcon_extract ((_bson), __VA_ARGS__, (void *) NULL) + +#define BCON_EXTRACT_CTX(_bson, _ctx, ...) \ + bcon_extract ((_bson), (_ctx), __VA_ARGS__, (void *) NULL) + +#define BCON_NEW(...) bcon_new (NULL, __VA_ARGS__, (void *) NULL) + +BSON_EXPORT (const char *) +bson_bcon_magic (void) BSON_GNUC_PURE; +BSON_EXPORT (const char *) +bson_bcone_magic (void) BSON_GNUC_PURE; + + +BSON_END_DECLS + + +#endif diff --git a/include/libbson-1.0/bson/bson-atomic.h b/include/libbson-1.0/bson/bson-atomic.h new file mode 100644 index 0000000..96af5b3 --- /dev/null +++ b/include/libbson-1.0/bson/bson-atomic.h @@ -0,0 +1,105 @@ +/* + * Copyright 2013-2014 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_ATOMIC_H +#define BSON_ATOMIC_H + + +#include "bson/bson-config.h" +#include "bson/bson-compat.h" +#include "bson/bson-macros.h" + + +BSON_BEGIN_DECLS + + +#if defined(__sun) && defined(__SVR4) +/* Solaris */ +#include +#define bson_atomic_int_add(p, v) \ + atomic_add_32_nv ((volatile uint32_t *) p, (v)) +#define bson_atomic_int64_add(p, v) \ + atomic_add_64_nv ((volatile uint64_t *) p, (v)) +#elif defined(_WIN32) +/* MSVC/MinGW */ +#define bson_atomic_int_add(p, v) \ + (InterlockedExchangeAdd ((volatile LONG *) (p), (LONG) (v)) + (LONG) (v)) +#define bson_atomic_int64_add(p, v) \ + (InterlockedExchangeAdd64 ((volatile LONGLONG *) (p), (LONGLONG) (v)) + \ + (LONGLONG) (v)) +#else +#ifdef BSON_HAVE_ATOMIC_32_ADD_AND_FETCH +#define bson_atomic_int_add(p, v) __sync_add_and_fetch ((p), (v)) +#else +#define __BSON_NEED_ATOMIC_32 +#endif +#ifdef BSON_HAVE_ATOMIC_64_ADD_AND_FETCH +#if BSON_GNUC_IS_VERSION(4, 1) +/* + * GCC 4.1 on i386 can generate buggy 64-bit atomic increment. + * So we will work around with a fallback. + * + * https://gcc.gnu.org/bugzilla/show_bug.cgi?id=40693 + */ +#define __BSON_NEED_ATOMIC_64 +#else +#define bson_atomic_int64_add(p, v) \ + __sync_add_and_fetch ((volatile int64_t *) (p), (int64_t) (v)) +#endif +#else +#define __BSON_NEED_ATOMIC_64 +#endif +#endif + +#ifdef __BSON_NEED_ATOMIC_32 +BSON_EXPORT (int32_t) +bson_atomic_int_add (volatile int32_t *p, int32_t n); +#endif +#ifdef __BSON_NEED_ATOMIC_64 +BSON_EXPORT (int64_t) +bson_atomic_int64_add (volatile int64_t *p, int64_t n); +#endif + + +#if defined(_WIN32) +#define bson_memory_barrier() MemoryBarrier () +#elif defined(__GNUC__) +#if BSON_GNUC_CHECK_VERSION(4, 1) +#define bson_memory_barrier() __sync_synchronize () +#else +#warning "GCC Pre-4.1 discovered, using inline assembly for memory barrier." +#define bson_memory_barrier() __asm__ volatile("" ::: "memory") +#endif +#elif defined(__SUNPRO_C) +#include +#define bson_memory_barrier() __machine_rw_barrier () +#elif defined(__xlC__) +#define bson_memory_barrier() __sync () +#else +#define __BSON_NEED_BARRIER 1 +#warning "Unknown compiler, using lock for compiler barrier." 
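+/*
+ * Editorial illustration (not part of the upstream libbson header): whichever
+ * backend the conditionals above selected, bson_atomic_int_add() and
+ * bson_atomic_int64_add() follow add-and-fetch semantics, i.e. they return
+ * the value *after* the increment. A minimal reference-counting sketch,
+ * assuming a hypothetical wrapper type my_handle_t:
+ *
+ *   typedef struct { volatile int32_t ref_count; } my_handle_t;
+ *
+ *   static void
+ *   my_handle_ref (my_handle_t *h)
+ *   {
+ *      bson_atomic_int_add (&h->ref_count, 1);
+ *   }
+ *
+ *   static bool
+ *   my_handle_unref (my_handle_t *h)   // true once the last reference drops
+ *   {
+ *      return bson_atomic_int_add (&h->ref_count, -1) == 0;
+ *   }
+ */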
+BSON_EXPORT (void) +bson_memory_barrier (void); +#endif + + +BSON_END_DECLS + + +#endif /* BSON_ATOMIC_H */ diff --git a/include/libbson-1.0/bson/bson-clock.h b/include/libbson-1.0/bson/bson-clock.h new file mode 100644 index 0000000..bd33104 --- /dev/null +++ b/include/libbson-1.0/bson/bson-clock.h @@ -0,0 +1,41 @@ +/* + * Copyright 2014 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_CLOCK_H +#define BSON_CLOCK_H + + +#include "bson/bson-compat.h" +#include "bson/bson-macros.h" +#include "bson/bson-types.h" + + +BSON_BEGIN_DECLS + + +BSON_EXPORT (int64_t) +bson_get_monotonic_time (void); +BSON_EXPORT (int) +bson_gettimeofday (struct timeval *tv); + + +BSON_END_DECLS + + +#endif /* BSON_CLOCK_H */ diff --git a/include/libbson-1.0/bson/bson-compat.h b/include/libbson-1.0/bson/bson-compat.h new file mode 100644 index 0000000..5da1e60 --- /dev/null +++ b/include/libbson-1.0/bson/bson-compat.h @@ -0,0 +1,177 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_COMPAT_H +#define BSON_COMPAT_H + + +#if defined(__MINGW32__) +#if defined(__USE_MINGW_ANSI_STDIO) +#if __USE_MINGW_ANSI_STDIO < 1 +#error "__USE_MINGW_ANSI_STDIO > 0 is required for correct PRI* macros" +#endif +#else +#define __USE_MINGW_ANSI_STDIO 1 +#endif +#endif + +#include "bson/bson-config.h" +#include "bson/bson-macros.h" + + +#ifdef BSON_OS_WIN32 +#if defined(_WIN32_WINNT) && (_WIN32_WINNT < 0x0600) +#undef _WIN32_WINNT +#endif +#ifndef _WIN32_WINNT +#define _WIN32_WINNT 0x0600 +#endif +#ifndef NOMINMAX +#define NOMINMAX +#endif +#include +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN +#include +#undef WIN32_LEAN_AND_MEAN +#else +#include +#endif +#include +#include +#endif + + +#ifdef BSON_OS_UNIX +#include +#include +#endif + + +#include "bson/bson-macros.h" + + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +BSON_BEGIN_DECLS + +#if !defined(_MSC_VER) || (_MSC_VER >= 1800) +#include +#endif +#ifdef _MSC_VER +#ifndef __cplusplus +/* benign redefinition of type */ +#pragma warning(disable : 4142) +#ifndef _SSIZE_T_DEFINED +#define _SSIZE_T_DEFINED +typedef SSIZE_T ssize_t; +#endif +#ifndef _SIZE_T_DEFINED +#define _SIZE_T_DEFINED +typedef SIZE_T size_t; +#endif +#pragma warning(default : 4142) +#else +/* + * MSVC++ does not include ssize_t, just size_t. 
+ * So we need to synthesize that as well. + */ +#pragma warning(disable : 4142) +#ifndef _SSIZE_T_DEFINED +#define _SSIZE_T_DEFINED +typedef SSIZE_T ssize_t; +#endif +#pragma warning(default : 4142) +#endif +#ifndef PRIi32 +#define PRIi32 "d" +#endif +#ifndef PRId32 +#define PRId32 "d" +#endif +#ifndef PRIu32 +#define PRIu32 "u" +#endif +#ifndef PRIi64 +#define PRIi64 "I64i" +#endif +#ifndef PRId64 +#define PRId64 "I64i" +#endif +#ifndef PRIu64 +#define PRIu64 "I64u" +#endif +#endif + +#if defined(__MINGW32__) && !defined(INIT_ONCE_STATIC_INIT) +#define INIT_ONCE_STATIC_INIT RTL_RUN_ONCE_INIT +typedef RTL_RUN_ONCE INIT_ONCE; +#endif + +#ifdef BSON_HAVE_STDBOOL_H +#include +#elif !defined(__bool_true_false_are_defined) +#ifndef __cplusplus +typedef signed char bool; +#define false 0 +#define true 1 +#endif +#define __bool_true_false_are_defined 1 +#endif + + +#if defined(__GNUC__) +#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1) +#define bson_sync_synchronize() __sync_synchronize () +#elif defined(__i386__) || defined(__i486__) || defined(__i586__) || \ + defined(__i686__) || defined(__x86_64__) +#define bson_sync_synchronize() asm volatile("mfence" ::: "memory") +#else +#define bson_sync_synchronize() asm volatile("sync" ::: "memory") +#endif +#elif defined(_MSC_VER) +#define bson_sync_synchronize() MemoryBarrier () +#endif + + +#if !defined(va_copy) && defined(__va_copy) +#define va_copy(dst, src) __va_copy (dst, src) +#endif + + +#if !defined(va_copy) +#define va_copy(dst, src) ((dst) = (src)) +#endif + + +BSON_END_DECLS + + +#endif /* BSON_COMPAT_H */ diff --git a/include/libbson-1.0/bson/bson-config.h b/include/libbson-1.0/bson/bson-config.h new file mode 100644 index 0000000..8a64bcf --- /dev/null +++ b/include/libbson-1.0/bson/bson-config.h @@ -0,0 +1,151 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if !defined(BSON_INSIDE) && !defined(BSON_COMPILATION) +#error "Only can be included directly." +#endif + +#ifndef BSON_CONFIG_H +#define BSON_CONFIG_H + +/* + * Define to 1234 for Little Endian, 4321 for Big Endian. + */ +#define BSON_BYTE_ORDER 1234 + + +/* + * Define to 1 if you have stdbool.h + */ +#define BSON_HAVE_STDBOOL_H 1 +#if BSON_HAVE_STDBOOL_H != 1 +# undef BSON_HAVE_STDBOOL_H +#endif + + +/* + * Define to 1 for POSIX-like systems, 2 for Windows. + */ +#define BSON_OS 1 + + +/* + * Define to 1 if we have access to GCC 32-bit atomic builtins. + * While this requires GCC 4.1+ in most cases, it is also architecture + * dependent. For example, some PPC or ARM systems may not have it even + * if it is a recent GCC version. + */ +#define BSON_HAVE_ATOMIC_32_ADD_AND_FETCH 1 +#if BSON_HAVE_ATOMIC_32_ADD_AND_FETCH != 1 +# undef BSON_HAVE_ATOMIC_32_ADD_AND_FETCH +#endif + +/* + * Similarly, define to 1 if we have access to GCC 64-bit atomic builtins. 
+ */ +#define BSON_HAVE_ATOMIC_64_ADD_AND_FETCH 1 +#if BSON_HAVE_ATOMIC_64_ADD_AND_FETCH != 1 +# undef BSON_HAVE_ATOMIC_64_ADD_AND_FETCH +#endif + + +/* + * Define to 1 if you have clock_gettime() available. + */ +#define BSON_HAVE_CLOCK_GETTIME 1 +#if BSON_HAVE_CLOCK_GETTIME != 1 +# undef BSON_HAVE_CLOCK_GETTIME +#endif + + +/* + * Define to 1 if you have strings.h available on your platform. + */ +#define BSON_HAVE_STRINGS_H 1 +#if BSON_HAVE_STRINGS_H != 1 +# undef BSON_HAVE_STRINGS_H +#endif + + +/* + * Define to 1 if you have strnlen available on your platform. + */ +#define BSON_HAVE_STRNLEN 1 +#if BSON_HAVE_STRNLEN != 1 +# undef BSON_HAVE_STRNLEN +#endif + + +/* + * Define to 1 if you have snprintf available on your platform. + */ +#define BSON_HAVE_SNPRINTF 1 +#if BSON_HAVE_SNPRINTF != 1 +# undef BSON_HAVE_SNPRINTF +#endif + + +/* + * Define to 1 if you have gmtime_r available on your platform. + */ +#define BSON_HAVE_GMTIME_R 1 +#if BSON_HAVE_GMTIME_R != 1 +# undef BSON_HAVE_GMTIME_R +#endif + + +/* + * Define to 1 if you have reallocf available on your platform. + */ +#define BSON_HAVE_REALLOCF 0 +#if BSON_HAVE_REALLOCF != 1 +# undef BSON_HAVE_REALLOCF +#endif + + +/* + * Define to 1 if you have struct timespec available on your platform. + */ +#define BSON_HAVE_TIMESPEC 1 +#if BSON_HAVE_TIMESPEC != 1 +# undef BSON_HAVE_TIMESPEC +#endif + + +/* + * Define to 1 if you want extra aligned types in libbson + */ +#define BSON_EXTRA_ALIGN 0 +#if BSON_EXTRA_ALIGN != 1 +# undef BSON_EXTRA_ALIGN +#endif + + +/* + * Define to 1 if you have SYS_gettid syscall + */ +#define BSON_HAVE_SYSCALL_TID 1 +#if BSON_HAVE_SYSCALL_TID != 1 +# undef BSON_HAVE_SYSCALL_TID +#endif + +#define BSON_HAVE_RAND_R 1 +#if BSON_HAVE_RAND_R != 1 +# undef BSON_HAVE_RAND_R +#endif + + +#endif /* BSON_CONFIG_H */ diff --git a/include/libbson-1.0/bson/bson-context.h b/include/libbson-1.0/bson/bson-context.h new file mode 100644 index 0000000..7e32845 --- /dev/null +++ b/include/libbson-1.0/bson/bson-context.h @@ -0,0 +1,42 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_CONTEXT_H +#define BSON_CONTEXT_H + + +#include "bson/bson-macros.h" +#include "bson/bson-types.h" + + +BSON_BEGIN_DECLS + + +BSON_EXPORT (bson_context_t *) +bson_context_new (bson_context_flags_t flags); +BSON_EXPORT (void) +bson_context_destroy (bson_context_t *context); +BSON_EXPORT (bson_context_t *) +bson_context_get_default (void); + + +BSON_END_DECLS + + +#endif /* BSON_CONTEXT_H */ diff --git a/include/libbson-1.0/bson/bson-decimal128.h b/include/libbson-1.0/bson/bson-decimal128.h new file mode 100644 index 0000000..d87703e --- /dev/null +++ b/include/libbson-1.0/bson/bson-decimal128.h @@ -0,0 +1,64 @@ +/* + * Copyright 2015 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_DECIMAL128_H +#define BSON_DECIMAL128_H + + +#include + +#include "bson/bson-macros.h" +#include "bson/bson-config.h" +#include "bson/bson-types.h" + + +/** + * BSON_DECIMAL128_STRING: + * + * The length of a decimal128 string (with null terminator). + * + * 1 for the sign + * 35 for digits and radix + * 2 for exponent indicator and sign + * 4 for exponent digits + */ +#define BSON_DECIMAL128_STRING 43 +#define BSON_DECIMAL128_INF "Infinity" +#define BSON_DECIMAL128_NAN "NaN" + + +BSON_BEGIN_DECLS + +BSON_EXPORT (void) +bson_decimal128_to_string (const bson_decimal128_t *dec, char *str); + + +/* Note: @string must be ASCII characters only! */ +BSON_EXPORT (bool) +bson_decimal128_from_string (const char *string, bson_decimal128_t *dec); + +BSON_EXPORT (bool) +bson_decimal128_from_string_w_len (const char *string, + int len, + bson_decimal128_t *dec); + +BSON_END_DECLS + + +#endif /* BSON_DECIMAL128_H */ diff --git a/include/libbson-1.0/bson/bson-endian.h b/include/libbson-1.0/bson/bson-endian.h new file mode 100644 index 0000000..fae6d7c --- /dev/null +++ b/include/libbson-1.0/bson/bson-endian.h @@ -0,0 +1,227 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_ENDIAN_H +#define BSON_ENDIAN_H + + +#if defined(__sun) +#include +#endif + +#include "bson/bson-config.h" +#include "bson/bson-macros.h" +#include "bson/bson-compat.h" + + +BSON_BEGIN_DECLS + + +#define BSON_BIG_ENDIAN 4321 +#define BSON_LITTLE_ENDIAN 1234 + + +#if defined(__sun) +#define BSON_UINT16_SWAP_LE_BE(v) BSWAP_16 ((uint16_t) v) +#define BSON_UINT32_SWAP_LE_BE(v) BSWAP_32 ((uint32_t) v) +#define BSON_UINT64_SWAP_LE_BE(v) BSWAP_64 ((uint64_t) v) +#elif defined(__clang__) && defined(__clang_major__) && \ + defined(__clang_minor__) && (__clang_major__ >= 3) && \ + (__clang_minor__ >= 1) +#if __has_builtin(__builtin_bswap16) +#define BSON_UINT16_SWAP_LE_BE(v) __builtin_bswap16 (v) +#endif +#if __has_builtin(__builtin_bswap32) +#define BSON_UINT32_SWAP_LE_BE(v) __builtin_bswap32 (v) +#endif +#if __has_builtin(__builtin_bswap64) +#define BSON_UINT64_SWAP_LE_BE(v) __builtin_bswap64 (v) +#endif +#elif defined(__GNUC__) && (__GNUC__ >= 4) +#if __GNUC__ > 4 || (defined(__GNUC_MINOR__) && __GNUC_MINOR__ >= 3) +#define BSON_UINT32_SWAP_LE_BE(v) __builtin_bswap32 ((uint32_t) v) +#define BSON_UINT64_SWAP_LE_BE(v) __builtin_bswap64 ((uint64_t) v) +#endif +#if __GNUC__ > 4 || (defined(__GNUC_MINOR__) && __GNUC_MINOR__ >= 8) +#define BSON_UINT16_SWAP_LE_BE(v) __builtin_bswap16 ((uint32_t) v) +#endif +#endif + + +#ifndef BSON_UINT16_SWAP_LE_BE +#define BSON_UINT16_SWAP_LE_BE(v) __bson_uint16_swap_slow ((uint16_t) v) +#endif + + +#ifndef BSON_UINT32_SWAP_LE_BE +#define BSON_UINT32_SWAP_LE_BE(v) __bson_uint32_swap_slow ((uint32_t) v) +#endif + + +#ifndef BSON_UINT64_SWAP_LE_BE +#define BSON_UINT64_SWAP_LE_BE(v) __bson_uint64_swap_slow ((uint64_t) v) +#endif + + +#if BSON_BYTE_ORDER == BSON_LITTLE_ENDIAN +#define BSON_UINT16_FROM_LE(v) ((uint16_t) v) +#define BSON_UINT16_TO_LE(v) ((uint16_t) v) +#define BSON_UINT16_FROM_BE(v) BSON_UINT16_SWAP_LE_BE (v) +#define BSON_UINT16_TO_BE(v) BSON_UINT16_SWAP_LE_BE (v) +#define BSON_UINT32_FROM_LE(v) ((uint32_t) v) +#define BSON_UINT32_TO_LE(v) ((uint32_t) v) +#define BSON_UINT32_FROM_BE(v) BSON_UINT32_SWAP_LE_BE (v) +#define BSON_UINT32_TO_BE(v) BSON_UINT32_SWAP_LE_BE (v) +#define BSON_UINT64_FROM_LE(v) ((uint64_t) v) +#define BSON_UINT64_TO_LE(v) ((uint64_t) v) +#define BSON_UINT64_FROM_BE(v) BSON_UINT64_SWAP_LE_BE (v) +#define BSON_UINT64_TO_BE(v) BSON_UINT64_SWAP_LE_BE (v) +#define BSON_DOUBLE_FROM_LE(v) ((double) v) +#define BSON_DOUBLE_TO_LE(v) ((double) v) +#elif BSON_BYTE_ORDER == BSON_BIG_ENDIAN +#define BSON_UINT16_FROM_LE(v) BSON_UINT16_SWAP_LE_BE (v) +#define BSON_UINT16_TO_LE(v) BSON_UINT16_SWAP_LE_BE (v) +#define BSON_UINT16_FROM_BE(v) ((uint16_t) v) +#define BSON_UINT16_TO_BE(v) ((uint16_t) v) +#define BSON_UINT32_FROM_LE(v) BSON_UINT32_SWAP_LE_BE (v) +#define BSON_UINT32_TO_LE(v) BSON_UINT32_SWAP_LE_BE (v) +#define BSON_UINT32_FROM_BE(v) ((uint32_t) v) +#define BSON_UINT32_TO_BE(v) ((uint32_t) v) +#define BSON_UINT64_FROM_LE(v) BSON_UINT64_SWAP_LE_BE (v) +#define BSON_UINT64_TO_LE(v) BSON_UINT64_SWAP_LE_BE (v) +#define BSON_UINT64_FROM_BE(v) ((uint64_t) v) +#define BSON_UINT64_TO_BE(v) ((uint64_t) v) +#define BSON_DOUBLE_FROM_LE(v) (__bson_double_swap_slow (v)) +#define BSON_DOUBLE_TO_LE(v) (__bson_double_swap_slow (v)) +#else +#error "The endianness of target architecture is unknown." +#endif + + +/* + *-------------------------------------------------------------------------- + * + * __bson_uint16_swap_slow -- + * + * Fallback endianness conversion for 16-bit integers. 
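+ *
+ *    Editorial illustration, not part of the upstream header: callers are
+ *    expected to use the BSON_UINT16/32/64_FROM_LE()/_TO_LE() macros defined
+ *    above rather than these slow helpers; on little-endian targets the
+ *    macros compile down to plain casts. Assuming data points at raw BSON
+ *    bytes (placeholder name):
+ *
+ *      uint32_t len;
+ *      memcpy (&len, data, sizeof (len));  // BSON stores lengths little-endian
+ *      len = BSON_UINT32_FROM_LE (len);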
+ * + * Returns: + * The endian swapped version. + * + * Side effects: + * None. + * + *-------------------------------------------------------------------------- + */ + +static BSON_INLINE uint16_t +__bson_uint16_swap_slow (uint16_t v) /* IN */ +{ + return ((v & 0x00FF) << 8) | ((v & 0xFF00) >> 8); +} + + +/* + *-------------------------------------------------------------------------- + * + * __bson_uint32_swap_slow -- + * + * Fallback endianness conversion for 32-bit integers. + * + * Returns: + * The endian swapped version. + * + * Side effects: + * None. + * + *-------------------------------------------------------------------------- + */ + +static BSON_INLINE uint32_t +__bson_uint32_swap_slow (uint32_t v) /* IN */ +{ + return ((v & 0x000000FFU) << 24) | ((v & 0x0000FF00U) << 8) | + ((v & 0x00FF0000U) >> 8) | ((v & 0xFF000000U) >> 24); +} + + +/* + *-------------------------------------------------------------------------- + * + * __bson_uint64_swap_slow -- + * + * Fallback endianness conversion for 64-bit integers. + * + * Returns: + * The endian swapped version. + * + * Side effects: + * None. + * + *-------------------------------------------------------------------------- + */ + +static BSON_INLINE uint64_t +__bson_uint64_swap_slow (uint64_t v) /* IN */ +{ + return ((v & 0x00000000000000FFULL) << 56) | + ((v & 0x000000000000FF00ULL) << 40) | + ((v & 0x0000000000FF0000ULL) << 24) | + ((v & 0x00000000FF000000ULL) << 8) | + ((v & 0x000000FF00000000ULL) >> 8) | + ((v & 0x0000FF0000000000ULL) >> 24) | + ((v & 0x00FF000000000000ULL) >> 40) | + ((v & 0xFF00000000000000ULL) >> 56); +} + + +/* + *-------------------------------------------------------------------------- + * + * __bson_double_swap_slow -- + * + * Fallback endianness conversion for double floating point. + * + * Returns: + * The endian swapped version. + * + * Side effects: + * None. + * + *-------------------------------------------------------------------------- + */ + +BSON_STATIC_ASSERT2 (sizeof_uint64_t, sizeof (double) == sizeof (uint64_t)); + +static BSON_INLINE double +__bson_double_swap_slow (double v) /* IN */ +{ + uint64_t uv; + + memcpy (&uv, &v, sizeof (v)); + uv = BSON_UINT64_SWAP_LE_BE (uv); + memcpy (&v, &uv, sizeof (v)); + + return v; +} + +BSON_END_DECLS + + +#endif /* BSON_ENDIAN_H */ diff --git a/include/libbson-1.0/bson/bson-error.h b/include/libbson-1.0/bson/bson-error.h new file mode 100644 index 0000000..9189691 --- /dev/null +++ b/include/libbson-1.0/bson/bson-error.h @@ -0,0 +1,50 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_ERROR_H +#define BSON_ERROR_H + + +#include "bson/bson-compat.h" +#include "bson/bson-macros.h" +#include "bson/bson-types.h" + + +BSON_BEGIN_DECLS + + +#define BSON_ERROR_JSON 1 +#define BSON_ERROR_READER 2 +#define BSON_ERROR_INVALID 3 + + +BSON_EXPORT (void) +bson_set_error (bson_error_t *error, + uint32_t domain, + uint32_t code, + const char *format, + ...) 
BSON_GNUC_PRINTF (4, 5); +BSON_EXPORT (char *) +bson_strerror_r (int err_code, char *buf, size_t buflen); + + +BSON_END_DECLS + + +#endif /* BSON_ERROR_H */ diff --git a/include/libbson-1.0/bson/bson-iter.h b/include/libbson-1.0/bson/bson-iter.h new file mode 100644 index 0000000..dbdea06 --- /dev/null +++ b/include/libbson-1.0/bson/bson-iter.h @@ -0,0 +1,547 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_ITER_H +#define BSON_ITER_H + + +#include "bson/bson.h" +#include "bson/bson-endian.h" +#include "bson/bson-macros.h" +#include "bson/bson-types.h" + + +BSON_BEGIN_DECLS + + +#define BSON_ITER_HOLDS_DOUBLE(iter) \ + (bson_iter_type ((iter)) == BSON_TYPE_DOUBLE) + +#define BSON_ITER_HOLDS_UTF8(iter) (bson_iter_type ((iter)) == BSON_TYPE_UTF8) + +#define BSON_ITER_HOLDS_DOCUMENT(iter) \ + (bson_iter_type ((iter)) == BSON_TYPE_DOCUMENT) + +#define BSON_ITER_HOLDS_ARRAY(iter) (bson_iter_type ((iter)) == BSON_TYPE_ARRAY) + +#define BSON_ITER_HOLDS_BINARY(iter) \ + (bson_iter_type ((iter)) == BSON_TYPE_BINARY) + +#define BSON_ITER_HOLDS_UNDEFINED(iter) \ + (bson_iter_type ((iter)) == BSON_TYPE_UNDEFINED) + +#define BSON_ITER_HOLDS_OID(iter) (bson_iter_type ((iter)) == BSON_TYPE_OID) + +#define BSON_ITER_HOLDS_BOOL(iter) (bson_iter_type ((iter)) == BSON_TYPE_BOOL) + +#define BSON_ITER_HOLDS_DATE_TIME(iter) \ + (bson_iter_type ((iter)) == BSON_TYPE_DATE_TIME) + +#define BSON_ITER_HOLDS_NULL(iter) (bson_iter_type ((iter)) == BSON_TYPE_NULL) + +#define BSON_ITER_HOLDS_REGEX(iter) (bson_iter_type ((iter)) == BSON_TYPE_REGEX) + +#define BSON_ITER_HOLDS_DBPOINTER(iter) \ + (bson_iter_type ((iter)) == BSON_TYPE_DBPOINTER) + +#define BSON_ITER_HOLDS_CODE(iter) (bson_iter_type ((iter)) == BSON_TYPE_CODE) + +#define BSON_ITER_HOLDS_SYMBOL(iter) \ + (bson_iter_type ((iter)) == BSON_TYPE_SYMBOL) + +#define BSON_ITER_HOLDS_CODEWSCOPE(iter) \ + (bson_iter_type ((iter)) == BSON_TYPE_CODEWSCOPE) + +#define BSON_ITER_HOLDS_INT32(iter) (bson_iter_type ((iter)) == BSON_TYPE_INT32) + +#define BSON_ITER_HOLDS_TIMESTAMP(iter) \ + (bson_iter_type ((iter)) == BSON_TYPE_TIMESTAMP) + +#define BSON_ITER_HOLDS_INT64(iter) (bson_iter_type ((iter)) == BSON_TYPE_INT64) + +#define BSON_ITER_HOLDS_DECIMAL128(iter) \ + (bson_iter_type ((iter)) == BSON_TYPE_DECIMAL128) + +#define BSON_ITER_HOLDS_MAXKEY(iter) \ + (bson_iter_type ((iter)) == BSON_TYPE_MAXKEY) + +#define BSON_ITER_HOLDS_MINKEY(iter) \ + (bson_iter_type ((iter)) == BSON_TYPE_MINKEY) + +#define BSON_ITER_HOLDS_INT(iter) \ + (BSON_ITER_HOLDS_INT32 (iter) || BSON_ITER_HOLDS_INT64 (iter)) + +#define BSON_ITER_HOLDS_NUMBER(iter) \ + (BSON_ITER_HOLDS_INT (iter) || BSON_ITER_HOLDS_DOUBLE (iter)) + +#define BSON_ITER_IS_KEY(iter, key) \ + (0 == strcmp ((key), bson_iter_key ((iter)))) + + +BSON_EXPORT (const bson_value_t *) +bson_iter_value (bson_iter_t *iter); + + +/** + * bson_iter_utf8_len_unsafe: + * @iter: a bson_iter_t. 
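+ *
+ * Editorial illustration, not part of the upstream header: the checked
+ * counterparts declared in this file are normally combined as follows,
+ * where doc is a previously constructed bson_t (placeholder name):
+ *
+ *   bson_iter_t it;
+ *   if (bson_iter_init (&it, doc)) {
+ *      while (bson_iter_next (&it)) {
+ *         if (BSON_ITER_HOLDS_UTF8 (&it)) {
+ *            uint32_t len;
+ *            printf ("%s = %s\n", bson_iter_key (&it),
+ *                    bson_iter_utf8 (&it, &len));
+ *         }
+ *      }
+ *   }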
+ * + * Returns the length of a string currently pointed to by @iter. This performs + * no validation so the is responsible for knowing the BSON is valid. Calling + * bson_validate() is one way to do this ahead of time. + */ +static BSON_INLINE uint32_t +bson_iter_utf8_len_unsafe (const bson_iter_t *iter) +{ + int32_t val; + + memcpy (&val, iter->raw + iter->d1, sizeof (val)); + val = BSON_UINT32_FROM_LE (val); + return BSON_MAX (0, val - 1); +} + + +BSON_EXPORT (void) +bson_iter_array (const bson_iter_t *iter, + uint32_t *array_len, + const uint8_t **array); + + +BSON_EXPORT (void) +bson_iter_binary (const bson_iter_t *iter, + bson_subtype_t *subtype, + uint32_t *binary_len, + const uint8_t **binary); + + +BSON_EXPORT (const char *) +bson_iter_code (const bson_iter_t *iter, uint32_t *length); + + +/** + * bson_iter_code_unsafe: + * @iter: A bson_iter_t. + * @length: A location for the length of the resulting string. + * + * Like bson_iter_code() but performs no integrity checks. + * + * Returns: A string that should not be modified or freed. + */ +static BSON_INLINE const char * +bson_iter_code_unsafe (const bson_iter_t *iter, uint32_t *length) +{ + *length = bson_iter_utf8_len_unsafe (iter); + return (const char *) (iter->raw + iter->d2); +} + + +BSON_EXPORT (const char *) +bson_iter_codewscope (const bson_iter_t *iter, + uint32_t *length, + uint32_t *scope_len, + const uint8_t **scope); + + +BSON_EXPORT (void) +bson_iter_dbpointer (const bson_iter_t *iter, + uint32_t *collection_len, + const char **collection, + const bson_oid_t **oid); + + +BSON_EXPORT (void) +bson_iter_document (const bson_iter_t *iter, + uint32_t *document_len, + const uint8_t **document); + + +BSON_EXPORT (double) +bson_iter_double (const bson_iter_t *iter); + +BSON_EXPORT (double) +bson_iter_as_double (const bson_iter_t *iter); + +/** + * bson_iter_double_unsafe: + * @iter: A bson_iter_t. + * + * Similar to bson_iter_double() but does not perform an integrity checking. + * + * Returns: A double. + */ +static BSON_INLINE double +bson_iter_double_unsafe (const bson_iter_t *iter) +{ + double val; + + memcpy (&val, iter->raw + iter->d1, sizeof (val)); + return BSON_DOUBLE_FROM_LE (val); +} + + +BSON_EXPORT (bool) +bson_iter_init (bson_iter_t *iter, const bson_t *bson); + +BSON_EXPORT (bool) +bson_iter_init_from_data (bson_iter_t *iter, + const uint8_t *data, + size_t length); + + +BSON_EXPORT (bool) +bson_iter_init_find (bson_iter_t *iter, const bson_t *bson, const char *key); + + +BSON_EXPORT (bool) +bson_iter_init_find_w_len (bson_iter_t *iter, + const bson_t *bson, + const char *key, + int keylen); + + +BSON_EXPORT (bool) +bson_iter_init_find_case (bson_iter_t *iter, + const bson_t *bson, + const char *key); + +BSON_EXPORT (bool) +bson_iter_init_from_data_at_offset (bson_iter_t *iter, + const uint8_t *data, + size_t length, + uint32_t offset, + uint32_t keylen); + +BSON_EXPORT (int32_t) +bson_iter_int32 (const bson_iter_t *iter); + + +/** + * bson_iter_int32_unsafe: + * @iter: A bson_iter_t. + * + * Similar to bson_iter_int32() but with no integrity checking. + * + * Returns: A 32-bit signed integer. + */ +static BSON_INLINE int32_t +bson_iter_int32_unsafe (const bson_iter_t *iter) +{ + int32_t val; + + memcpy (&val, iter->raw + iter->d1, sizeof (val)); + return BSON_UINT32_FROM_LE (val); +} + + +BSON_EXPORT (int64_t) +bson_iter_int64 (const bson_iter_t *iter); + + +BSON_EXPORT (int64_t) +bson_iter_as_int64 (const bson_iter_t *iter); + + +/** + * bson_iter_int64_unsafe: + * @iter: a bson_iter_t. 
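+ *
+ * Editorial illustration, not part of the upstream header: a checked numeric
+ * lookup typically combines the finder and coercion helpers declared above,
+ * with doc and the "depth" key as placeholders:
+ *
+ *   bson_iter_t it;
+ *   int64_t depth = 0;
+ *   if (bson_iter_init_find (&it, doc, "depth") &&
+ *       BSON_ITER_HOLDS_NUMBER (&it)) {
+ *      depth = bson_iter_as_int64 (&it);
+ *   }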
+ * + * Similar to bson_iter_int64() but without integrity checking. + * + * Returns: A 64-bit signed integer. + */ +static BSON_INLINE int64_t +bson_iter_int64_unsafe (const bson_iter_t *iter) +{ + int64_t val; + + memcpy (&val, iter->raw + iter->d1, sizeof (val)); + return BSON_UINT64_FROM_LE (val); +} + + +BSON_EXPORT (bool) +bson_iter_find (bson_iter_t *iter, const char *key); + + +BSON_EXPORT (bool) +bson_iter_find_w_len (bson_iter_t *iter, const char *key, int keylen); + + +BSON_EXPORT (bool) +bson_iter_find_case (bson_iter_t *iter, const char *key); + + +BSON_EXPORT (bool) +bson_iter_find_descendant (bson_iter_t *iter, + const char *dotkey, + bson_iter_t *descendant); + + +BSON_EXPORT (bool) +bson_iter_next (bson_iter_t *iter); + + +BSON_EXPORT (const bson_oid_t *) +bson_iter_oid (const bson_iter_t *iter); + + +/** + * bson_iter_oid_unsafe: + * @iter: A #bson_iter_t. + * + * Similar to bson_iter_oid() but performs no integrity checks. + * + * Returns: A #bson_oid_t that should not be modified or freed. + */ +static BSON_INLINE const bson_oid_t * +bson_iter_oid_unsafe (const bson_iter_t *iter) +{ + return (const bson_oid_t *) (iter->raw + iter->d1); +} + + +BSON_EXPORT (bool) +bson_iter_decimal128 (const bson_iter_t *iter, bson_decimal128_t *dec); + + +/** + * bson_iter_decimal128_unsafe: + * @iter: A #bson_iter_t. + * + * Similar to bson_iter_decimal128() but performs no integrity checks. + * + * Returns: A #bson_decimal128_t. + */ +static BSON_INLINE void +bson_iter_decimal128_unsafe (const bson_iter_t *iter, bson_decimal128_t *dec) +{ + uint64_t low_le; + uint64_t high_le; + + memcpy (&low_le, iter->raw + iter->d1, sizeof (low_le)); + memcpy (&high_le, iter->raw + iter->d1 + 8, sizeof (high_le)); + + dec->low = BSON_UINT64_FROM_LE (low_le); + dec->high = BSON_UINT64_FROM_LE (high_le); +} + + +BSON_EXPORT (const char *) +bson_iter_key (const bson_iter_t *iter); + +BSON_EXPORT (uint32_t) +bson_iter_key_len (const bson_iter_t *iter); + + +/** + * bson_iter_key_unsafe: + * @iter: A bson_iter_t. + * + * Similar to bson_iter_key() but performs no integrity checking. + * + * Returns: A string that should not be modified or freed. + */ +static BSON_INLINE const char * +bson_iter_key_unsafe (const bson_iter_t *iter) +{ + return (const char *) (iter->raw + iter->key); +} + + +BSON_EXPORT (const char *) +bson_iter_utf8 (const bson_iter_t *iter, uint32_t *length); + + +/** + * bson_iter_utf8_unsafe: + * + * Similar to bson_iter_utf8() but performs no integrity checking. + * + * Returns: A string that should not be modified or freed. + */ +static BSON_INLINE const char * +bson_iter_utf8_unsafe (const bson_iter_t *iter, size_t *length) +{ + *length = bson_iter_utf8_len_unsafe (iter); + return (const char *) (iter->raw + iter->d2); +} + + +BSON_EXPORT (char *) +bson_iter_dup_utf8 (const bson_iter_t *iter, uint32_t *length); + + +BSON_EXPORT (int64_t) +bson_iter_date_time (const bson_iter_t *iter); + + +BSON_EXPORT (time_t) +bson_iter_time_t (const bson_iter_t *iter); + + +/** + * bson_iter_time_t_unsafe: + * @iter: A bson_iter_t. + * + * Similar to bson_iter_time_t() but performs no integrity checking. + * + * Returns: A time_t containing the number of seconds since UNIX epoch + * in UTC. + */ +static BSON_INLINE time_t +bson_iter_time_t_unsafe (const bson_iter_t *iter) +{ + return (time_t) (bson_iter_int64_unsafe (iter) / 1000UL); +} + + +BSON_EXPORT (void) +bson_iter_timeval (const bson_iter_t *iter, struct timeval *tv); + + +/** + * bson_iter_timeval_unsafe: + * @iter: A bson_iter_t. 
+ * @tv: A struct timeval. + * + * Similar to bson_iter_timeval() but performs no integrity checking. + */ +static BSON_INLINE void +bson_iter_timeval_unsafe (const bson_iter_t *iter, struct timeval *tv) +{ + int64_t value = bson_iter_int64_unsafe (iter); +#ifdef BSON_OS_WIN32 + tv->tv_sec = (long) (value / 1000); +#else + tv->tv_sec = (suseconds_t) (value / 1000); +#endif + tv->tv_usec = (value % 1000) * 1000; +} + + +BSON_EXPORT (void) +bson_iter_timestamp (const bson_iter_t *iter, + uint32_t *timestamp, + uint32_t *increment); + + +BSON_EXPORT (bool) +bson_iter_bool (const bson_iter_t *iter); + + +/** + * bson_iter_bool_unsafe: + * @iter: A bson_iter_t. + * + * Similar to bson_iter_bool() but performs no integrity checking. + * + * Returns: true or false. + */ +static BSON_INLINE bool +bson_iter_bool_unsafe (const bson_iter_t *iter) +{ + char val; + + memcpy (&val, iter->raw + iter->d1, 1); + return !!val; +} + + +BSON_EXPORT (bool) +bson_iter_as_bool (const bson_iter_t *iter); + + +BSON_EXPORT (const char *) +bson_iter_regex (const bson_iter_t *iter, const char **options); + + +BSON_EXPORT (const char *) +bson_iter_symbol (const bson_iter_t *iter, uint32_t *length); + + +BSON_EXPORT (bson_type_t) +bson_iter_type (const bson_iter_t *iter); + + +/** + * bson_iter_type_unsafe: + * @iter: A bson_iter_t. + * + * Similar to bson_iter_type() but performs no integrity checking. + * + * Returns: A bson_type_t. + */ +static BSON_INLINE bson_type_t +bson_iter_type_unsafe (const bson_iter_t *iter) +{ + return (bson_type_t) (iter->raw + iter->type)[0]; +} + + +BSON_EXPORT (bool) +bson_iter_recurse (const bson_iter_t *iter, bson_iter_t *child); + + +BSON_EXPORT (void) +bson_iter_overwrite_int32 (bson_iter_t *iter, int32_t value); + + +BSON_EXPORT (void) +bson_iter_overwrite_int64 (bson_iter_t *iter, int64_t value); + + +BSON_EXPORT (void) +bson_iter_overwrite_double (bson_iter_t *iter, double value); + + +BSON_EXPORT (void) +bson_iter_overwrite_decimal128 (bson_iter_t *iter, bson_decimal128_t *value); + + +BSON_EXPORT (void) +bson_iter_overwrite_bool (bson_iter_t *iter, bool value); + + +BSON_EXPORT (void) +bson_iter_overwrite_oid (bson_iter_t *iter, const bson_oid_t *value); + + +BSON_EXPORT (void) +bson_iter_overwrite_timestamp (bson_iter_t *iter, + uint32_t timestamp, + uint32_t increment); + + +BSON_EXPORT (void) +bson_iter_overwrite_date_time (bson_iter_t *iter, int64_t value); + + +BSON_EXPORT (bool) +bson_iter_visit_all (bson_iter_t *iter, + const bson_visitor_t *visitor, + void *data); + +BSON_EXPORT (uint32_t) +bson_iter_offset (bson_iter_t *iter); + + +BSON_END_DECLS + + +#endif /* BSON_ITER_H */ diff --git a/include/libbson-1.0/bson/bson-json.h b/include/libbson-1.0/bson/bson-json.h new file mode 100644 index 0000000..d00e62e --- /dev/null +++ b/include/libbson-1.0/bson/bson-json.h @@ -0,0 +1,73 @@ +/* + * Copyright 2014 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_JSON_H +#define BSON_JSON_H + + +#include "bson/bson.h" + + +BSON_BEGIN_DECLS + + +typedef struct _bson_json_reader_t bson_json_reader_t; + + +typedef enum { + BSON_JSON_ERROR_READ_CORRUPT_JS = 1, + BSON_JSON_ERROR_READ_INVALID_PARAM, + BSON_JSON_ERROR_READ_CB_FAILURE, +} bson_json_error_code_t; + + +typedef ssize_t (*bson_json_reader_cb) (void *handle, + uint8_t *buf, + size_t count); +typedef void (*bson_json_destroy_cb) (void *handle); + + +BSON_EXPORT (bson_json_reader_t *) +bson_json_reader_new (void *data, + bson_json_reader_cb cb, + bson_json_destroy_cb dcb, + bool allow_multiple, + size_t buf_size); +BSON_EXPORT (bson_json_reader_t *) +bson_json_reader_new_from_fd (int fd, bool close_on_destroy); +BSON_EXPORT (bson_json_reader_t *) +bson_json_reader_new_from_file (const char *filename, bson_error_t *error); +BSON_EXPORT (void) +bson_json_reader_destroy (bson_json_reader_t *reader); +BSON_EXPORT (int) +bson_json_reader_read (bson_json_reader_t *reader, + bson_t *bson, + bson_error_t *error); +BSON_EXPORT (bson_json_reader_t *) +bson_json_data_reader_new (bool allow_multiple, size_t size); +BSON_EXPORT (void) +bson_json_data_reader_ingest (bson_json_reader_t *reader, + const uint8_t *data, + size_t len); + + +BSON_END_DECLS + + +#endif /* BSON_JSON_H */ diff --git a/include/libbson-1.0/bson/bson-keys.h b/include/libbson-1.0/bson/bson-keys.h new file mode 100644 index 0000000..13a89c1 --- /dev/null +++ b/include/libbson-1.0/bson/bson-keys.h @@ -0,0 +1,41 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_KEYS_H +#define BSON_KEYS_H + + +#include "bson/bson-macros.h" +#include "bson/bson-types.h" + + +BSON_BEGIN_DECLS + + +BSON_EXPORT (size_t) +bson_uint32_to_string (uint32_t value, + const char **strptr, + char *str, + size_t size); + + +BSON_END_DECLS + + +#endif /* BSON_KEYS_H */ diff --git a/include/libbson-1.0/bson/bson-macros.h b/include/libbson-1.0/bson/bson-macros.h new file mode 100644 index 0000000..e2f09f0 --- /dev/null +++ b/include/libbson-1.0/bson/bson-macros.h @@ -0,0 +1,293 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_MACROS_H +#define BSON_MACROS_H + + +#include + +#ifdef __cplusplus +#include +#endif + +#include "bson/bson-config.h" + + +#if BSON_OS == 1 +#define BSON_OS_UNIX +#elif BSON_OS == 2 +#define BSON_OS_WIN32 +#else +#error "Unknown operating system." +#endif + + +#ifdef __cplusplus +#define BSON_BEGIN_DECLS extern "C" { +#define BSON_END_DECLS } +#else +#define BSON_BEGIN_DECLS +#define BSON_END_DECLS +#endif + + +#if defined(__GNUC__) +#define BSON_GNUC_CHECK_VERSION(major, minor) \ + ((__GNUC__ > (major)) || \ + ((__GNUC__ == (major)) && (__GNUC_MINOR__ >= (minor)))) +#else +#define BSON_GNUC_CHECK_VERSION(major, minor) 0 +#endif + + +#if defined(__GNUC__) +#define BSON_GNUC_IS_VERSION(major, minor) \ + ((__GNUC__ == (major)) && (__GNUC_MINOR__ == (minor))) +#else +#define BSON_GNUC_IS_VERSION(major, minor) 0 +#endif + + +/* Decorate public functions: + * - if BSON_STATIC, we're compiling a program that uses libbson as a static + * library, don't decorate functions + * - else if BSON_COMPILATION, we're compiling a static or shared libbson, mark + * public functions for export from the shared lib (which has no effect on + * the static lib) + * - else, we're compiling a program that uses libbson as a shared library, + * mark public functions as DLL imports for Microsoft Visual C + */ + +#ifdef _MSC_VER +/* + * Microsoft Visual C + */ +#ifdef BSON_STATIC +#define BSON_API +#elif defined(BSON_COMPILATION) +#define BSON_API __declspec(dllexport) +#else +#define BSON_API __declspec(dllimport) +#endif +#define BSON_CALL __cdecl + +#elif defined(__GNUC__) +/* + * GCC + */ +#ifdef BSON_STATIC +#define BSON_API +#elif defined(BSON_COMPILATION) +#define BSON_API __attribute__ ((visibility ("default"))) +#else +#define BSON_API +#endif +#define BSON_CALL + +#else +/* + * Other compilers + */ +#define BSON_API +#define BSON_CALL + +#endif + +#define BSON_EXPORT(type) BSON_API type BSON_CALL + + +#ifdef MIN +#define BSON_MIN MIN +#elif defined(__cplusplus) +#define BSON_MIN(a, b) ((std::min) (a, b)) +#elif defined(_MSC_VER) +#define BSON_MIN(a, b) ((a) < (b) ? (a) : (b)) +#else +#define BSON_MIN(a, b) (((a) < (b)) ? (a) : (b)) +#endif + + +#ifdef MAX +#define BSON_MAX MAX +#elif defined(__cplusplus) +#define BSON_MAX(a, b) ((std::max) (a, b)) +#elif defined(_MSC_VER) +#define BSON_MAX(a, b) ((a) > (b) ? (a) : (b)) +#else +#define BSON_MAX(a, b) (((a) > (b)) ? (a) : (b)) +#endif + + +#ifdef ABS +#define BSON_ABS ABS +#else +#define BSON_ABS(a) (((a) < 0) ? ((a) * -1) : (a)) +#endif + +#ifdef _MSC_VER +#ifdef _WIN64 +#define BSON_ALIGN_OF_PTR 8 +#else +#define BSON_ALIGN_OF_PTR 4 +#endif +#else +#define BSON_ALIGN_OF_PTR (sizeof (void *)) +#endif + +#ifdef BSON_EXTRA_ALIGN +#if defined(_MSC_VER) +#define BSON_ALIGNED_BEGIN(_N) __declspec(align (_N)) +#define BSON_ALIGNED_END(_N) +#else +#define BSON_ALIGNED_BEGIN(_N) +#define BSON_ALIGNED_END(_N) __attribute__ ((aligned (_N))) +#endif +#else +#if defined(_MSC_VER) +#define BSON_ALIGNED_BEGIN(_N) __declspec(align (BSON_ALIGN_OF_PTR)) +#define BSON_ALIGNED_END(_N) +#else +#define BSON_ALIGNED_BEGIN(_N) +#define BSON_ALIGNED_END(_N) \ + __attribute__ ( \ + (aligned ((_N) > BSON_ALIGN_OF_PTR ? 
BSON_ALIGN_OF_PTR : (_N)))) +#endif +#endif + + +#define bson_str_empty(s) (!s[0]) +#define bson_str_empty0(s) (!s || !s[0]) + + +#if defined(_WIN32) +#define BSON_FUNC __FUNCTION__ +#elif defined(__STDC_VERSION__) && __STDC_VERSION__ < 199901L +#define BSON_FUNC __FUNCTION__ +#else +#define BSON_FUNC __func__ +#endif + +#define BSON_ASSERT(test) \ + do { \ + if (!(BSON_LIKELY (test))) { \ + fprintf (stderr, \ + "%s:%d %s(): precondition failed: %s\n", \ + __FILE__, \ + __LINE__, \ + BSON_FUNC, \ + #test); \ + abort (); \ + } \ + } while (0) + +/* obsolete macros, preserved for compatibility */ +#define BSON_STATIC_ASSERT(s) BSON_STATIC_ASSERT_ (s, __LINE__) +#define BSON_STATIC_ASSERT_JOIN(a, b) BSON_STATIC_ASSERT_JOIN2 (a, b) +#define BSON_STATIC_ASSERT_JOIN2(a, b) a##b +#define BSON_STATIC_ASSERT_(s, l) \ + typedef char BSON_STATIC_ASSERT_JOIN (static_assert_test_, \ + __LINE__)[(s) ? 1 : -1] + +/* modern macros */ +#define BSON_STATIC_ASSERT2(_name, _s) \ + BSON_STATIC_ASSERT2_ (_s, __LINE__, _name) +#define BSON_STATIC_ASSERT_JOIN3(_a, _b, _name) \ + BSON_STATIC_ASSERT_JOIN4 (_a, _b, _name) +#define BSON_STATIC_ASSERT_JOIN4(_a, _b, _name) _a##_b##_name +#define BSON_STATIC_ASSERT2_(_s, _l, _name) \ + typedef char BSON_STATIC_ASSERT_JOIN3 ( \ + static_assert_test_, __LINE__, _name)[(_s) ? 1 : -1] + + +#if defined(__GNUC__) +#define BSON_GNUC_PURE __attribute__ ((pure)) +#define BSON_GNUC_WARN_UNUSED_RESULT __attribute__ ((warn_unused_result)) +#else +#define BSON_GNUC_PURE +#define BSON_GNUC_WARN_UNUSED_RESULT +#endif + + +#if BSON_GNUC_CHECK_VERSION(4, 0) && !defined(_WIN32) +#define BSON_GNUC_NULL_TERMINATED __attribute__ ((sentinel)) +#define BSON_GNUC_INTERNAL __attribute__ ((visibility ("hidden"))) +#else +#define BSON_GNUC_NULL_TERMINATED +#define BSON_GNUC_INTERNAL +#endif + + +#if defined(__GNUC__) +#define BSON_LIKELY(x) __builtin_expect (!!(x), 1) +#define BSON_UNLIKELY(x) __builtin_expect (!!(x), 0) +#else +#define BSON_LIKELY(v) v +#define BSON_UNLIKELY(v) v +#endif + + +#if defined(__clang__) +#define BSON_GNUC_PRINTF(f, v) __attribute__ ((format (printf, f, v))) +#elif BSON_GNUC_CHECK_VERSION(4, 4) +#define BSON_GNUC_PRINTF(f, v) __attribute__ ((format (gnu_printf, f, v))) +#else +#define BSON_GNUC_PRINTF(f, v) +#endif + + +#if defined(__LP64__) || defined(_LP64) +#define BSON_WORD_SIZE 64 +#else +#define BSON_WORD_SIZE 32 +#endif + + +#if defined(_MSC_VER) +#define BSON_INLINE __inline +#else +#define BSON_INLINE __inline__ +#endif + + +#ifdef _MSC_VER +#define BSON_ENSURE_ARRAY_PARAM_SIZE(_n) +#define BSON_TYPEOF decltype +#else +#define BSON_ENSURE_ARRAY_PARAM_SIZE(_n) static(_n) +#define BSON_TYPEOF typeof +#endif + + +#if BSON_GNUC_CHECK_VERSION(3, 1) +#define BSON_GNUC_DEPRECATED __attribute__ ((__deprecated__)) +#else +#define BSON_GNUC_DEPRECATED +#endif + + +#if BSON_GNUC_CHECK_VERSION(4, 5) +#define BSON_GNUC_DEPRECATED_FOR(f) \ + __attribute__ ((deprecated ("Use " #f " instead"))) +#else +#define BSON_GNUC_DEPRECATED_FOR(f) BSON_GNUC_DEPRECATED +#endif + + +#endif /* BSON_MACROS_H */ diff --git a/include/libbson-1.0/bson/bson-md5.h b/include/libbson-1.0/bson/bson-md5.h new file mode 100644 index 0000000..a319b7f --- /dev/null +++ b/include/libbson-1.0/bson/bson-md5.h @@ -0,0 +1,89 @@ +/* + Copyright (C) 1999, 2002 Aladdin Enterprises. All rights reserved. + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. 
+ + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgement in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. + + L. Peter Deutsch + ghost@aladdin.com + + */ +/* $Id: md5.h,v 1.4 2002/04/13 19:20:28 lpd Exp $ */ +/* + Independent implementation of MD5 (RFC 1321). + + This code implements the MD5 Algorithm defined in RFC 1321, whose + text is available at + http://www.ietf.org/rfc/rfc1321.txt + The code is derived from the text of the RFC, including the test suite + (section A.5) but excluding the rest of Appendix A. It does not include + any code or documentation that is identified in the RFC as being + copyrighted. + + The original and principal author of md5.h is L. Peter Deutsch + . Other authors are noted in the change history + that follows (in reverse chronological order): + + 2002-04-13 lpd Removed support for non-ANSI compilers; removed + references to Ghostscript; clarified derivation from RFC 1321; + now handles byte order either statically or dynamically. + 1999-11-04 lpd Edited comments slightly for automatic TOC extraction. + 1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5); + added conditionalization for C++ compilation from Martin + Purschke . + 1999-05-03 lpd Original version. + */ + + +/* + * The following MD5 implementation has been modified to use types as + * specified in libbson. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_MD5_H +#define BSON_MD5_H + + +#include "bson/bson-endian.h" + + +BSON_BEGIN_DECLS + + +typedef struct { + uint32_t count[2]; /* message length in bits, lsw first */ + uint32_t abcd[4]; /* digest buffer */ + uint8_t buf[64]; /* accumulate block */ +} bson_md5_t; + + +BSON_EXPORT (void) +bson_md5_init (bson_md5_t *pms) BSON_GNUC_DEPRECATED; +BSON_EXPORT (void) +bson_md5_append (bson_md5_t *pms, + const uint8_t *data, + uint32_t nbytes) BSON_GNUC_DEPRECATED; +BSON_EXPORT (void) +bson_md5_finish (bson_md5_t *pms, uint8_t digest[16]) BSON_GNUC_DEPRECATED; + + +BSON_END_DECLS + + +#endif /* BSON_MD5_H */ diff --git a/include/libbson-1.0/bson/bson-memory.h b/include/libbson-1.0/bson/bson-memory.h new file mode 100644 index 0000000..39d5914 --- /dev/null +++ b/include/libbson-1.0/bson/bson-memory.h @@ -0,0 +1,64 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_MEMORY_H +#define BSON_MEMORY_H + + +#include "bson/bson-macros.h" +#include "bson/bson-types.h" + + +BSON_BEGIN_DECLS + + +typedef void *(*bson_realloc_func) (void *mem, size_t num_bytes, void *ctx); + + +typedef struct _bson_mem_vtable_t { + void *(*malloc) (size_t num_bytes); + void *(*calloc) (size_t n_members, size_t num_bytes); + void *(*realloc) (void *mem, size_t num_bytes); + void (*free) (void *mem); + void *padding[4]; +} bson_mem_vtable_t; + + +BSON_EXPORT (void) +bson_mem_set_vtable (const bson_mem_vtable_t *vtable); +BSON_EXPORT (void) +bson_mem_restore_vtable (void); +BSON_EXPORT (void *) +bson_malloc (size_t num_bytes); +BSON_EXPORT (void *) +bson_malloc0 (size_t num_bytes); +BSON_EXPORT (void *) +bson_realloc (void *mem, size_t num_bytes); +BSON_EXPORT (void *) +bson_realloc_ctx (void *mem, size_t num_bytes, void *ctx); +BSON_EXPORT (void) +bson_free (void *mem); +BSON_EXPORT (void) +bson_zero_free (void *mem, size_t size); + + +BSON_END_DECLS + + +#endif /* BSON_MEMORY_H */ diff --git a/include/libbson-1.0/bson/bson-oid.h b/include/libbson-1.0/bson/bson-oid.h new file mode 100644 index 0000000..b954931 --- /dev/null +++ b/include/libbson-1.0/bson/bson-oid.h @@ -0,0 +1,244 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_OID_H +#define BSON_OID_H + + +#include + +#include "bson/bson-context.h" +#include "bson/bson-macros.h" +#include "bson/bson-types.h" +#include "bson/bson-endian.h" + + +BSON_BEGIN_DECLS + + +BSON_EXPORT (int) +bson_oid_compare (const bson_oid_t *oid1, const bson_oid_t *oid2); +BSON_EXPORT (void) +bson_oid_copy (const bson_oid_t *src, bson_oid_t *dst); +BSON_EXPORT (bool) +bson_oid_equal (const bson_oid_t *oid1, const bson_oid_t *oid2); +BSON_EXPORT (bool) +bson_oid_is_valid (const char *str, size_t length); +BSON_EXPORT (time_t) +bson_oid_get_time_t (const bson_oid_t *oid); +BSON_EXPORT (uint32_t) +bson_oid_hash (const bson_oid_t *oid); +BSON_EXPORT (void) +bson_oid_init (bson_oid_t *oid, bson_context_t *context); +BSON_EXPORT (void) +bson_oid_init_from_data (bson_oid_t *oid, const uint8_t *data); +BSON_EXPORT (void) +bson_oid_init_from_string (bson_oid_t *oid, const char *str); +BSON_EXPORT (void) +bson_oid_init_sequence (bson_oid_t *oid, + bson_context_t *context) BSON_GNUC_DEPRECATED; +BSON_EXPORT (void) +bson_oid_to_string (const bson_oid_t *oid, char str[25]); + + +/** + * bson_oid_compare_unsafe: + * @oid1: A bson_oid_t. + * @oid2: A bson_oid_t. + * + * Performs a qsort() style comparison between @oid1 and @oid2. + * + * This function is meant to be as fast as possible and therefore performs + * no argument validation. That is the callers responsibility. + * + * Returns: An integer < 0 if @oid1 is less than @oid2. Zero if they are equal. + * An integer > 0 if @oid1 is greater than @oid2. 
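+ *
+ * Illustrative sketch of a qsort()-style comparator built on this helper
+ * (the name cmp_oids is hypothetical; both pointers must reference valid
+ * bson_oid_t values):
+ *
+ *   static int
+ *   cmp_oids (const void *a, const void *b)
+ *   {
+ *      return bson_oid_compare_unsafe ((const bson_oid_t *) a,
+ *                                      (const bson_oid_t *) b);
+ *   }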
+ */ +static BSON_INLINE int +bson_oid_compare_unsafe (const bson_oid_t *oid1, const bson_oid_t *oid2) +{ + return memcmp (oid1, oid2, sizeof *oid1); +} + + +/** + * bson_oid_equal_unsafe: + * @oid1: A bson_oid_t. + * @oid2: A bson_oid_t. + * + * Checks the equality of @oid1 and @oid2. + * + * This function is meant to be as fast as possible and therefore performs + * no checks for argument validity. That is the callers responsibility. + * + * Returns: true if @oid1 and @oid2 are equal; otherwise false. + */ +static BSON_INLINE bool +bson_oid_equal_unsafe (const bson_oid_t *oid1, const bson_oid_t *oid2) +{ + return !memcmp (oid1, oid2, sizeof *oid1); +} + +/** + * bson_oid_hash_unsafe: + * @oid: A bson_oid_t. + * + * This function performs a DJB style hash upon the bytes contained in @oid. + * The result is a hash key suitable for use in a hashtable. + * + * This function is meant to be as fast as possible and therefore performs no + * validation of arguments. The caller is responsible to ensure they are + * passing valid arguments. + * + * Returns: A uint32_t containing a hash code. + */ +static BSON_INLINE uint32_t +bson_oid_hash_unsafe (const bson_oid_t *oid) +{ + uint32_t hash = 5381; + uint32_t i; + + for (i = 0; i < sizeof oid->bytes; i++) { + hash = ((hash << 5) + hash) + oid->bytes[i]; + } + + return hash; +} + + +/** + * bson_oid_copy_unsafe: + * @src: A bson_oid_t to copy from. + * @dst: A bson_oid_t to copy into. + * + * Copies the contents of @src into @dst. This function is meant to be as + * fast as possible and therefore performs no argument checking. It is the + * callers responsibility to ensure they are passing valid data into the + * function. + */ +static BSON_INLINE void +bson_oid_copy_unsafe (const bson_oid_t *src, bson_oid_t *dst) +{ + memcpy (dst, src, sizeof *src); +} + + +/** + * bson_oid_parse_hex_char: + * @hex: A character to parse to its integer value. + * + * This function contains a jump table to return the integer value for a + * character containing a hexadecimal value (0-9, a-f, A-F). If the character + * is not a hexadecimal character then zero is returned. + * + * Returns: An integer between 0 and 15. + */ +static BSON_INLINE uint8_t +bson_oid_parse_hex_char (char hex) +{ + switch (hex) { + case '0': + return 0; + case '1': + return 1; + case '2': + return 2; + case '3': + return 3; + case '4': + return 4; + case '5': + return 5; + case '6': + return 6; + case '7': + return 7; + case '8': + return 8; + case '9': + return 9; + case 'a': + case 'A': + return 0xa; + case 'b': + case 'B': + return 0xb; + case 'c': + case 'C': + return 0xc; + case 'd': + case 'D': + return 0xd; + case 'e': + case 'E': + return 0xe; + case 'f': + case 'F': + return 0xf; + default: + return 0; + } +} + + +/** + * bson_oid_init_from_string_unsafe: + * @oid: A bson_oid_t to store the result. + * @str: A 24-character hexadecimal encoded string. + * + * Parses a string containing 24 hexadecimal encoded bytes into a bson_oid_t. + * This function is meant to be as fast as possible and inlined into your + * code. For that purpose, the function does not perform any sort of bounds + * checking and it is the callers responsibility to ensure they are passing + * valid input to the function. 
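+ *
+ * Illustrative sketch (the 24-character hex string below is an arbitrary
+ * example value):
+ *
+ *   bson_oid_t oid;
+ *
+ *   bson_oid_init_from_string_unsafe (&oid, "507f1f77bcf86cd799439011");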
+ */
+static BSON_INLINE void
+bson_oid_init_from_string_unsafe (bson_oid_t *oid, const char *str)
+{
+   int i;
+
+   for (i = 0; i < 12; i++) {
+      oid->bytes[i] = ((bson_oid_parse_hex_char (str[2 * i]) << 4) |
+                       (bson_oid_parse_hex_char (str[2 * i + 1])));
+   }
+}
+
+
+/**
+ * bson_oid_get_time_t_unsafe:
+ * @oid: A bson_oid_t.
+ *
+ * Fetches the time @oid was generated.
+ *
+ * Returns: A time_t containing the UNIX timestamp of generation.
+ */
+static BSON_INLINE time_t
+bson_oid_get_time_t_unsafe (const bson_oid_t *oid)
+{
+   uint32_t t;
+
+   memcpy (&t, oid, sizeof (t));
+   return BSON_UINT32_FROM_BE (t);
+}
+
+
+BSON_END_DECLS
+
+
+#endif /* BSON_OID_H */
diff --git a/include/libbson-1.0/bson/bson-prelude.h b/include/libbson-1.0/bson/bson-prelude.h
new file mode 100644
index 0000000..7d917b5
--- /dev/null
+++ b/include/libbson-1.0/bson/bson-prelude.h
@@ -0,0 +1,19 @@
+/*
+ * Copyright 2018-present MongoDB, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if !defined(BSON_INSIDE) && !defined(BSON_COMPILATION)
+#error "Only <bson/bson.h> can be included directly."
+#endif
\ No newline at end of file
diff --git a/include/libbson-1.0/bson/bson-reader.h b/include/libbson-1.0/bson/bson-reader.h
new file mode 100644
index 0000000..344f1c7
--- /dev/null
+++ b/include/libbson-1.0/bson/bson-reader.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2013 MongoDB, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "bson/bson-prelude.h"
+
+
+#ifndef BSON_READER_H
+#define BSON_READER_H
+
+
+#include "bson/bson-compat.h"
+#include "bson/bson-oid.h"
+#include "bson/bson-types.h"
+
+
+BSON_BEGIN_DECLS
+
+
+#define BSON_ERROR_READER_BADFD 1
+
+
+/*
+ *--------------------------------------------------------------------------
+ *
+ * bson_reader_read_func_t --
+ *
+ *       This function is a callback used by bson_reader_t to read the
+ *       next chunk of data from the underlying opaque file descriptor.
+ *
+ *       This function is meant to operate similarly to the read() function
+ *       provided by libc on UNIX-like systems.
+ *
+ * Parameters:
+ *       @handle: The handle to read from.
+ *       @buf: The buffer to read into.
+ *       @count: The number of bytes to read.
+ *
+ * Returns:
+ *       0 for end of stream.
+ *       -1 for read failure.
+ *       Greater than zero for number of bytes read into @buf.
+ *
+ * Side effects:
+ *       None.
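+ *
+ * Example:
+ *       An illustrative callback over a FILE * handle (the name
+ *       file_read_cb is hypothetical):
+ *
+ *          static ssize_t
+ *          file_read_cb (void *handle, void *buf, size_t count)
+ *          {
+ *             size_t r = fread (buf, 1, count, (FILE *) handle);
+ *             return (r == 0 && ferror ((FILE *) handle)) ? -1 : (ssize_t) r;
+ *          }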
+ * + *-------------------------------------------------------------------------- + */ + +typedef ssize_t (*bson_reader_read_func_t) (void *handle, /* IN */ + void *buf, /* IN */ + size_t count); /* IN */ + + +/* + *-------------------------------------------------------------------------- + * + * bson_reader_destroy_func_t -- + * + * Destroy callback to release any resources associated with the + * opaque handle. + * + * Parameters: + * @handle: the handle provided to bson_reader_new_from_handle(). + * + * Returns: + * None. + * + * Side effects: + * None. + * + *-------------------------------------------------------------------------- + */ + +typedef void (*bson_reader_destroy_func_t) (void *handle); /* IN */ + + +BSON_EXPORT (bson_reader_t *) +bson_reader_new_from_handle (void *handle, + bson_reader_read_func_t rf, + bson_reader_destroy_func_t df); +BSON_EXPORT (bson_reader_t *) +bson_reader_new_from_fd (int fd, bool close_on_destroy); +BSON_EXPORT (bson_reader_t *) +bson_reader_new_from_file (const char *path, bson_error_t *error); +BSON_EXPORT (bson_reader_t *) +bson_reader_new_from_data (const uint8_t *data, size_t length); +BSON_EXPORT (void) +bson_reader_destroy (bson_reader_t *reader); +BSON_EXPORT (void) +bson_reader_set_read_func (bson_reader_t *reader, bson_reader_read_func_t func); +BSON_EXPORT (void) +bson_reader_set_destroy_func (bson_reader_t *reader, + bson_reader_destroy_func_t func); +BSON_EXPORT (const bson_t *) +bson_reader_read (bson_reader_t *reader, bool *reached_eof); +BSON_EXPORT (off_t) +bson_reader_tell (bson_reader_t *reader); +BSON_EXPORT (void) +bson_reader_reset (bson_reader_t *reader); + +BSON_END_DECLS + + +#endif /* BSON_READER_H */ diff --git a/include/libbson-1.0/bson/bson-string.h b/include/libbson-1.0/bson/bson-string.h new file mode 100644 index 0000000..a4264f8 --- /dev/null +++ b/include/libbson-1.0/bson/bson-string.h @@ -0,0 +1,84 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_STRING_H +#define BSON_STRING_H + + +#include + +#include "bson/bson-macros.h" +#include "bson/bson-types.h" + + +BSON_BEGIN_DECLS + + +typedef struct { + char *str; + uint32_t len; + uint32_t alloc; +} bson_string_t; + + +BSON_EXPORT (bson_string_t *) +bson_string_new (const char *str); +BSON_EXPORT (char *) +bson_string_free (bson_string_t *string, bool free_segment); +BSON_EXPORT (void) +bson_string_append (bson_string_t *string, const char *str); +BSON_EXPORT (void) +bson_string_append_c (bson_string_t *string, char str); +BSON_EXPORT (void) +bson_string_append_unichar (bson_string_t *string, bson_unichar_t unichar); +BSON_EXPORT (void) +bson_string_append_printf (bson_string_t *string, const char *format, ...) + BSON_GNUC_PRINTF (2, 3); +BSON_EXPORT (void) +bson_string_truncate (bson_string_t *string, uint32_t len); +BSON_EXPORT (char *) +bson_strdup (const char *str); +BSON_EXPORT (char *) +bson_strdup_printf (const char *format, ...) 
BSON_GNUC_PRINTF (1, 2); +BSON_EXPORT (char *) +bson_strdupv_printf (const char *format, va_list args) BSON_GNUC_PRINTF (1, 0); +BSON_EXPORT (char *) +bson_strndup (const char *str, size_t n_bytes); +BSON_EXPORT (void) +bson_strncpy (char *dst, const char *src, size_t size); +BSON_EXPORT (int) +bson_vsnprintf (char *str, size_t size, const char *format, va_list ap) + BSON_GNUC_PRINTF (3, 0); +BSON_EXPORT (int) +bson_snprintf (char *str, size_t size, const char *format, ...) + BSON_GNUC_PRINTF (3, 4); +BSON_EXPORT (void) +bson_strfreev (char **strv); +BSON_EXPORT (size_t) +bson_strnlen (const char *s, size_t maxlen); +BSON_EXPORT (int64_t) +bson_ascii_strtoll (const char *str, char **endptr, int base); +BSON_EXPORT (int) +bson_strcasecmp (const char *s1, const char *s2); + + +BSON_END_DECLS + + +#endif /* BSON_STRING_H */ diff --git a/include/libbson-1.0/bson/bson-types.h b/include/libbson-1.0/bson/bson-types.h new file mode 100644 index 0000000..d446fec --- /dev/null +++ b/include/libbson-1.0/bson/bson-types.h @@ -0,0 +1,564 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_TYPES_H +#define BSON_TYPES_H + + +#include +#include + +#include "bson/bson-macros.h" +#include "bson/bson-config.h" +#include "bson/bson-compat.h" +#include "bson/bson-endian.h" + +BSON_BEGIN_DECLS + + +/* + *-------------------------------------------------------------------------- + * + * bson_unichar_t -- + * + * bson_unichar_t provides an unsigned 32-bit type for containing + * unicode characters. When iterating UTF-8 sequences, this should + * be used to avoid losing the high-bits of non-ascii characters. + * + * See also: + * bson_string_append_unichar() + * + *-------------------------------------------------------------------------- + */ + +typedef uint32_t bson_unichar_t; + + +/** + * bson_context_flags_t: + * + * This enumeration is used to configure a bson_context_t. + * + * %BSON_CONTEXT_NONE: Use default options. + * %BSON_CONTEXT_THREAD_SAFE: Context will be called from multiple threads. + * %BSON_CONTEXT_DISABLE_PID_CACHE: Call getpid() instead of caching the + * result of getpid() when initializing the context. + * %BSON_CONTEXT_DISABLE_HOST_CACHE: Call gethostname() instead of caching the + * result of gethostname() when initializing the context. + */ +typedef enum { + BSON_CONTEXT_NONE = 0, + BSON_CONTEXT_THREAD_SAFE = (1 << 0), + BSON_CONTEXT_DISABLE_HOST_CACHE = (1 << 1), + BSON_CONTEXT_DISABLE_PID_CACHE = (1 << 2), +#ifdef BSON_HAVE_SYSCALL_TID + BSON_CONTEXT_USE_TASK_ID = (1 << 3), +#endif +} bson_context_flags_t; + + +/** + * bson_context_t: + * + * This structure manages context for the bson library. It handles + * configuration for thread-safety and other performance related requirements. + * Consumers will create a context and may use multiple under a variety of + * situations. + * + * If your program calls fork(), you should initialize a new bson_context_t + * using bson_context_init(). 
+ * + * If you are using threading, it is suggested that you use a bson_context_t + * per thread for best performance. Alternatively, you can initialize the + * bson_context_t with BSON_CONTEXT_THREAD_SAFE, although a performance penalty + * will be incurred. + * + * Many functions will require that you provide a bson_context_t such as OID + * generation. + * + * This structure is opaque in that you cannot see the contents of the + * structure. However, it is stack allocatable in that enough padding is + * provided in _bson_context_t to hold the structure. + */ +typedef struct _bson_context_t bson_context_t; + + +/** + * bson_t: + * + * This structure manages a buffer whose contents are a properly formatted + * BSON document. You may perform various transforms on the BSON documents. + * Additionally, it can be iterated over using bson_iter_t. + * + * See bson_iter_init() for iterating the contents of a bson_t. + * + * When building a bson_t structure using the various append functions, + * memory allocations may occur. That is performed using power of two + * allocations and realloc(). + * + * See http://bsonspec.org for the BSON document spec. + * + * This structure is meant to fit in two sequential 64-byte cachelines. + */ +#ifdef BSON_MEMCHECK +BSON_ALIGNED_BEGIN (128) +typedef struct _bson_t { + uint32_t flags; /* Internal flags for the bson_t. */ + uint32_t len; /* Length of BSON data. */ + char *canary; /* For valgrind check */ + uint8_t padding[120 - sizeof (char*)]; +} bson_t BSON_ALIGNED_END (128); +#else +BSON_ALIGNED_BEGIN (128) +typedef struct _bson_t { + uint32_t flags; /* Internal flags for the bson_t. */ + uint32_t len; /* Length of BSON data. */ + uint8_t padding[120]; /* Padding for stack allocation. */ +} bson_t BSON_ALIGNED_END (128); +#endif + + +/** + * BSON_INITIALIZER: + * + * This macro can be used to initialize a #bson_t structure on the stack + * without calling bson_init(). + * + * |[ + * bson_t b = BSON_INITIALIZER; + * ]| + */ +#ifdef BSON_MEMCHECK +#define BSON_INITIALIZER \ + { \ + 3, 5, \ + bson_malloc (1), \ + { \ + 5 \ + }, \ + } +#else +#define BSON_INITIALIZER \ + { \ + 3, 5, \ + { \ + 5 \ + } \ + } +#endif + + +BSON_STATIC_ASSERT2 (bson_t, sizeof (bson_t) == 128); + + +/** + * bson_oid_t: + * + * This structure contains the binary form of a BSON Object Id as specified + * on http://bsonspec.org. If you would like the bson_oid_t in string form + * see bson_oid_to_string() or bson_oid_to_string_r(). + */ +typedef struct { + uint8_t bytes[12]; +} bson_oid_t; + +BSON_STATIC_ASSERT2 (oid_t, sizeof (bson_oid_t) == 12); + +/** + * bson_decimal128_t: + * + * @high The high-order bytes of the decimal128. This field contains sign, + * combination bits, exponent, and part of the coefficient continuation. + * @low The low-order bytes of the decimal128. This field contains the second + * part of the coefficient continuation. + * + * This structure is a boxed type containing the value for the BSON decimal128 + * type. The structure stores the 128 bits such that they correspond to the + * native format for the IEEE decimal128 type, if it is implemented. + **/ +typedef struct { +#if BSON_BYTE_ORDER == BSON_LITTLE_ENDIAN + uint64_t low; + uint64_t high; +#elif BSON_BYTE_ORDER == BSON_BIG_ENDIAN + uint64_t high; + uint64_t low; +#endif +} bson_decimal128_t; + + +/** + * bson_validate_flags_t: + * + * This enumeration is used for validation of BSON documents. It allows + * selective control on what you wish to validate. 
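+ * The flags are bit values and may be OR'd together; an illustrative call
+ * (doc is an assumed, initialized bson_t):
+ *
+ *   size_t err_offset;
+ *
+ *   if (!bson_validate (&doc, BSON_VALIDATE_UTF8 | BSON_VALIDATE_DOLLAR_KEYS,
+ *                       &err_offset)) {
+ *      fprintf (stderr, "invalid BSON at offset %zu\n", err_offset);
+ *   }
+ *
+ * The individual flags are: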
+ * + * %BSON_VALIDATE_NONE: No additional validation occurs. + * %BSON_VALIDATE_UTF8: Check that strings are valid UTF-8. + * %BSON_VALIDATE_DOLLAR_KEYS: Check that keys do not start with $. + * %BSON_VALIDATE_DOT_KEYS: Check that keys do not contain a period. + * %BSON_VALIDATE_UTF8_ALLOW_NULL: Allow NUL bytes in UTF-8 text. + * %BSON_VALIDATE_EMPTY_KEYS: Prohibit zero-length field names + */ +typedef enum { + BSON_VALIDATE_NONE = 0, + BSON_VALIDATE_UTF8 = (1 << 0), + BSON_VALIDATE_DOLLAR_KEYS = (1 << 1), + BSON_VALIDATE_DOT_KEYS = (1 << 2), + BSON_VALIDATE_UTF8_ALLOW_NULL = (1 << 3), + BSON_VALIDATE_EMPTY_KEYS = (1 << 4), +} bson_validate_flags_t; + + +/** + * bson_type_t: + * + * This enumeration contains all of the possible types within a BSON document. + * Use bson_iter_type() to fetch the type of a field while iterating over it. + */ +typedef enum { + BSON_TYPE_EOD = 0x00, + BSON_TYPE_DOUBLE = 0x01, + BSON_TYPE_UTF8 = 0x02, + BSON_TYPE_DOCUMENT = 0x03, + BSON_TYPE_ARRAY = 0x04, + BSON_TYPE_BINARY = 0x05, + BSON_TYPE_UNDEFINED = 0x06, + BSON_TYPE_OID = 0x07, + BSON_TYPE_BOOL = 0x08, + BSON_TYPE_DATE_TIME = 0x09, + BSON_TYPE_NULL = 0x0A, + BSON_TYPE_REGEX = 0x0B, + BSON_TYPE_DBPOINTER = 0x0C, + BSON_TYPE_CODE = 0x0D, + BSON_TYPE_SYMBOL = 0x0E, + BSON_TYPE_CODEWSCOPE = 0x0F, + BSON_TYPE_INT32 = 0x10, + BSON_TYPE_TIMESTAMP = 0x11, + BSON_TYPE_INT64 = 0x12, + BSON_TYPE_DECIMAL128 = 0x13, + BSON_TYPE_MAXKEY = 0x7F, + BSON_TYPE_MINKEY = 0xFF, +} bson_type_t; + + +/** + * bson_subtype_t: + * + * This enumeration contains the various subtypes that may be used in a binary + * field. See http://bsonspec.org for more information. + */ +typedef enum { + BSON_SUBTYPE_BINARY = 0x00, + BSON_SUBTYPE_FUNCTION = 0x01, + BSON_SUBTYPE_BINARY_DEPRECATED = 0x02, + BSON_SUBTYPE_UUID_DEPRECATED = 0x03, + BSON_SUBTYPE_UUID = 0x04, + BSON_SUBTYPE_MD5 = 0x05, + BSON_SUBTYPE_USER = 0x80, +} bson_subtype_t; + + +/* + *-------------------------------------------------------------------------- + * + * bson_value_t -- + * + * A boxed type to contain various bson_type_t types. + * + * See also: + * bson_value_copy() + * bson_value_destroy() + * + *-------------------------------------------------------------------------- + */ + +BSON_ALIGNED_BEGIN (8) +typedef struct _bson_value_t { + bson_type_t value_type; + int32_t padding; + union { + bson_oid_t v_oid; + int64_t v_int64; + int32_t v_int32; + int8_t v_int8; + double v_double; + bool v_bool; + int64_t v_datetime; + struct { + uint32_t timestamp; + uint32_t increment; + } v_timestamp; + struct { + char *str; + uint32_t len; + } v_utf8; + struct { + uint8_t *data; + uint32_t data_len; + } v_doc; + struct { + uint8_t *data; + uint32_t data_len; + bson_subtype_t subtype; + } v_binary; + struct { + char *regex; + char *options; + } v_regex; + struct { + char *collection; + uint32_t collection_len; + bson_oid_t oid; + } v_dbpointer; + struct { + char *code; + uint32_t code_len; + } v_code; + struct { + char *code; + uint8_t *scope_data; + uint32_t code_len; + uint32_t scope_len; + } v_codewscope; + struct { + char *symbol; + uint32_t len; + } v_symbol; + bson_decimal128_t v_decimal128; + } value; +} bson_value_t BSON_ALIGNED_END (8); + + +/** + * bson_iter_t: + * + * This structure manages iteration over a bson_t structure. It keeps track + * of the location of the current key and value within the buffer. Using the + * various functions to get the value of the iter will read from these + * locations. + * + * This structure is safe to discard on the stack. 
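+ * An illustrative iteration sketch (bson_iter_init() and bson_iter_next()
+ * are declared in bson-iter.h; doc is an assumed, initialized bson_t):
+ *
+ *   bson_iter_t iter;
+ *
+ *   if (bson_iter_init (&iter, &doc)) {
+ *      while (bson_iter_next (&iter)) {
+ *         printf ("found key: %s\n", bson_iter_key (&iter));
+ *      }
+ *   }
+ *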
No cleanup is necessary + * after using it. + */ +BSON_ALIGNED_BEGIN (128) +typedef struct { + const uint8_t *raw; /* The raw buffer being iterated. */ + uint32_t len; /* The length of raw. */ + uint32_t off; /* The offset within the buffer. */ + uint32_t type; /* The offset of the type byte. */ + uint32_t key; /* The offset of the key byte. */ + uint32_t d1; /* The offset of the first data byte. */ + uint32_t d2; /* The offset of the second data byte. */ + uint32_t d3; /* The offset of the third data byte. */ + uint32_t d4; /* The offset of the fourth data byte. */ + uint32_t next_off; /* The offset of the next field. */ + uint32_t err_off; /* The offset of the error. */ + bson_value_t value; /* Internal value for various state. */ +} bson_iter_t BSON_ALIGNED_END (128); + + +/** + * bson_reader_t: + * + * This structure is used to iterate over a sequence of BSON documents. It + * allows for them to be iterated with the possibility of no additional + * memory allocations under certain circumstances such as reading from an + * incoming mongo packet. + */ + +BSON_ALIGNED_BEGIN (BSON_ALIGN_OF_PTR) +typedef struct { + uint32_t type; + /*< private >*/ +} bson_reader_t BSON_ALIGNED_END (BSON_ALIGN_OF_PTR); + + +/** + * bson_visitor_t: + * + * This structure contains a series of pointers that can be executed for + * each field of a BSON document based on the field type. + * + * For example, if an int32 field is found, visit_int32 will be called. + * + * When visiting each field using bson_iter_visit_all(), you may provide a + * data pointer that will be provided with each callback. This might be useful + * if you are marshaling to another language. + * + * You may pre-maturely stop the visitation of fields by returning true in your + * visitor. Returning false will continue visitation to further fields. 
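+ *
+ * An illustrative sketch that counts UTF-8 fields (count_utf8 and n_utf8
+ * are hypothetical names; iter is an assumed, initialized bson_iter_t):
+ *
+ *   static bool
+ *   count_utf8 (const bson_iter_t *iter, const char *key,
+ *               size_t v_utf8_len, const char *v_utf8, void *data)
+ *   {
+ *      (*(int *) data)++;
+ *      return false;
+ *   }
+ *
+ *   bson_visitor_t visitor = {0};
+ *   int n_utf8 = 0;
+ *
+ *   visitor.visit_utf8 = count_utf8;
+ *   bson_iter_visit_all (&iter, &visitor, &n_utf8);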
+ */ +BSON_ALIGNED_BEGIN (8) +typedef struct { + /* run before / after descending into a document */ + bool (*visit_before) (const bson_iter_t *iter, const char *key, void *data); + bool (*visit_after) (const bson_iter_t *iter, const char *key, void *data); + /* corrupt BSON, or unsupported type and visit_unsupported_type not set */ + void (*visit_corrupt) (const bson_iter_t *iter, void *data); + /* normal bson field callbacks */ + bool (*visit_double) (const bson_iter_t *iter, + const char *key, + double v_double, + void *data); + bool (*visit_utf8) (const bson_iter_t *iter, + const char *key, + size_t v_utf8_len, + const char *v_utf8, + void *data); + bool (*visit_document) (const bson_iter_t *iter, + const char *key, + const bson_t *v_document, + void *data); + bool (*visit_array) (const bson_iter_t *iter, + const char *key, + const bson_t *v_array, + void *data); + bool (*visit_binary) (const bson_iter_t *iter, + const char *key, + bson_subtype_t v_subtype, + size_t v_binary_len, + const uint8_t *v_binary, + void *data); + /* normal field with deprecated "Undefined" BSON type */ + bool (*visit_undefined) (const bson_iter_t *iter, + const char *key, + void *data); + bool (*visit_oid) (const bson_iter_t *iter, + const char *key, + const bson_oid_t *v_oid, + void *data); + bool (*visit_bool) (const bson_iter_t *iter, + const char *key, + bool v_bool, + void *data); + bool (*visit_date_time) (const bson_iter_t *iter, + const char *key, + int64_t msec_since_epoch, + void *data); + bool (*visit_null) (const bson_iter_t *iter, const char *key, void *data); + bool (*visit_regex) (const bson_iter_t *iter, + const char *key, + const char *v_regex, + const char *v_options, + void *data); + bool (*visit_dbpointer) (const bson_iter_t *iter, + const char *key, + size_t v_collection_len, + const char *v_collection, + const bson_oid_t *v_oid, + void *data); + bool (*visit_code) (const bson_iter_t *iter, + const char *key, + size_t v_code_len, + const char *v_code, + void *data); + bool (*visit_symbol) (const bson_iter_t *iter, + const char *key, + size_t v_symbol_len, + const char *v_symbol, + void *data); + bool (*visit_codewscope) (const bson_iter_t *iter, + const char *key, + size_t v_code_len, + const char *v_code, + const bson_t *v_scope, + void *data); + bool (*visit_int32) (const bson_iter_t *iter, + const char *key, + int32_t v_int32, + void *data); + bool (*visit_timestamp) (const bson_iter_t *iter, + const char *key, + uint32_t v_timestamp, + uint32_t v_increment, + void *data); + bool (*visit_int64) (const bson_iter_t *iter, + const char *key, + int64_t v_int64, + void *data); + bool (*visit_maxkey) (const bson_iter_t *iter, const char *key, void *data); + bool (*visit_minkey) (const bson_iter_t *iter, const char *key, void *data); + /* if set, called instead of visit_corrupt when an apparently valid BSON + * includes an unrecognized field type (reading future version of BSON) */ + void (*visit_unsupported_type) (const bson_iter_t *iter, + const char *key, + uint32_t type_code, + void *data); + bool (*visit_decimal128) (const bson_iter_t *iter, + const char *key, + const bson_decimal128_t *v_decimal128, + void *data); + + void *padding[7]; +} bson_visitor_t BSON_ALIGNED_END (8); + +#define BSON_ERROR_BUFFER_SIZE 504 + +BSON_ALIGNED_BEGIN (8) +typedef struct _bson_error_t { + uint32_t domain; + uint32_t code; + char message[BSON_ERROR_BUFFER_SIZE]; +} bson_error_t BSON_ALIGNED_END (8); + + +BSON_STATIC_ASSERT2 (error_t, sizeof (bson_error_t) == 512); + + +/** + * bson_next_power_of_two: + * 
@v: A 32-bit unsigned integer of required bytes. + * + * Determines the next larger power of two for the value of @v + * in a constant number of operations. + * + * It is up to the caller to guarantee this will not overflow. + * + * Returns: The next power of 2 from @v. + */ +static BSON_INLINE size_t +bson_next_power_of_two (size_t v) +{ + v--; + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; +#if BSON_WORD_SIZE == 64 + v |= v >> 32; +#endif + v++; + + return v; +} + + +static BSON_INLINE bool +bson_is_power_of_two (uint32_t v) +{ + return ((v != 0) && ((v & (v - 1)) == 0)); +} + + +BSON_END_DECLS + + +#endif /* BSON_TYPES_H */ diff --git a/include/libbson-1.0/bson/bson-utf8.h b/include/libbson-1.0/bson/bson-utf8.h new file mode 100644 index 0000000..f83f7a2 --- /dev/null +++ b/include/libbson-1.0/bson/bson-utf8.h @@ -0,0 +1,46 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_UTF8_H +#define BSON_UTF8_H + + +#include "bson/bson-macros.h" +#include "bson/bson-types.h" + + +BSON_BEGIN_DECLS + + +BSON_EXPORT (bool) +bson_utf8_validate (const char *utf8, size_t utf8_len, bool allow_null); +BSON_EXPORT (char *) +bson_utf8_escape_for_json (const char *utf8, ssize_t utf8_len); +BSON_EXPORT (bson_unichar_t) +bson_utf8_get_char (const char *utf8); +BSON_EXPORT (const char *) +bson_utf8_next_char (const char *utf8); +BSON_EXPORT (void) +bson_utf8_from_unichar (bson_unichar_t unichar, char utf8[6], uint32_t *len); + + +BSON_END_DECLS + + +#endif /* BSON_UTF8_H */ diff --git a/include/libbson-1.0/bson/bson-value.h b/include/libbson-1.0/bson/bson-value.h new file mode 100644 index 0000000..c2ef2dc --- /dev/null +++ b/include/libbson-1.0/bson/bson-value.h @@ -0,0 +1,40 @@ +/* + * Copyright 2014 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_VALUE_H +#define BSON_VALUE_H + + +#include "bson/bson-macros.h" +#include "bson/bson-types.h" + + +BSON_BEGIN_DECLS + + +BSON_EXPORT (void) +bson_value_copy (const bson_value_t *src, bson_value_t *dst); +BSON_EXPORT (void) +bson_value_destroy (bson_value_t *value); + + +BSON_END_DECLS + + +#endif /* BSON_VALUE_H */ diff --git a/include/libbson-1.0/bson/bson-version-functions.h b/include/libbson-1.0/bson/bson-version-functions.h new file mode 100644 index 0000000..f224feb --- /dev/null +++ b/include/libbson-1.0/bson/bson-version-functions.h @@ -0,0 +1,41 @@ +/* + * Copyright 2015 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#include "bson/bson-prelude.h" + + +#ifndef BSON_VERSION_FUNCTIONS_H +#define BSON_VERSION_FUNCTIONS_H + +#include "bson/bson-types.h" + +BSON_BEGIN_DECLS + +BSON_EXPORT (int) +bson_get_major_version (void); +BSON_EXPORT (int) +bson_get_minor_version (void); +BSON_EXPORT (int) +bson_get_micro_version (void); +BSON_EXPORT (const char *) +bson_get_version (void); +BSON_EXPORT (bool) +bson_check_version (int required_major, int required_minor, int required_micro); + +BSON_END_DECLS + +#endif /* BSON_VERSION_FUNCTIONS_H */ diff --git a/include/libbson-1.0/bson/bson-version.h b/include/libbson-1.0/bson/bson-version.h new file mode 100644 index 0000000..bf76313 --- /dev/null +++ b/include/libbson-1.0/bson/bson-version.h @@ -0,0 +1,101 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#if !defined (BSON_INSIDE) && !defined (BSON_COMPILATION) +#error "Only can be included directly." +#endif + + +#ifndef BSON_VERSION_H +#define BSON_VERSION_H + + +/** + * BSON_MAJOR_VERSION: + * + * BSON major version component (e.g. 1 if %BSON_VERSION is 1.2.3) + */ +#define BSON_MAJOR_VERSION (1) + + +/** + * BSON_MINOR_VERSION: + * + * BSON minor version component (e.g. 2 if %BSON_VERSION is 1.2.3) + */ +#define BSON_MINOR_VERSION (14) + + +/** + * BSON_MICRO_VERSION: + * + * BSON micro version component (e.g. 3 if %BSON_VERSION is 1.2.3) + */ +#define BSON_MICRO_VERSION (0) + + +/** + * BSON_PRERELEASE_VERSION: + * + * BSON prerelease version component (e.g. pre if %BSON_VERSION is 1.2.3-pre) + */ +#define BSON_PRERELEASE_VERSION () + +/** + * BSON_VERSION: + * + * BSON version. + */ +#define BSON_VERSION (1.14.0) + + +/** + * BSON_VERSION_S: + * + * BSON version, encoded as a string, useful for printing and + * concatenation. 
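+ *
+ * Illustrative use, combining the compile-time string with the runtime
+ * check declared in bson-version-functions.h:
+ *
+ *   printf ("compiled against libbson %s\n", BSON_VERSION_S);
+ *
+ *   if (!bson_check_version (BSON_MAJOR_VERSION, BSON_MINOR_VERSION,
+ *                            BSON_MICRO_VERSION)) {
+ *      fprintf (stderr, "runtime libbson is older than the headers\n");
+ *   }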
+ */ +#define BSON_VERSION_S "1.14.0" + + +/** + * BSON_VERSION_HEX: + * + * BSON version, encoded as an hexadecimal number, useful for + * integer comparisons. + */ +#define BSON_VERSION_HEX (BSON_MAJOR_VERSION << 24 | \ + BSON_MINOR_VERSION << 16 | \ + BSON_MICRO_VERSION << 8) + + +/** + * BSON_CHECK_VERSION: + * @major: required major version + * @minor: required minor version + * @micro: required micro version + * + * Compile-time version checking. Evaluates to %TRUE if the version + * of BSON is greater than the required one. + */ +#define BSON_CHECK_VERSION(major,minor,micro) \ + (BSON_MAJOR_VERSION > (major) || \ + (BSON_MAJOR_VERSION == (major) && BSON_MINOR_VERSION > (minor)) || \ + (BSON_MAJOR_VERSION == (major) && BSON_MINOR_VERSION == (minor) && \ + BSON_MICRO_VERSION >= (micro))) + +#endif /* BSON_VERSION_H */ diff --git a/include/libbson-1.0/bson/bson-writer.h b/include/libbson-1.0/bson/bson-writer.h new file mode 100644 index 0000000..58192d8 --- /dev/null +++ b/include/libbson-1.0/bson/bson-writer.h @@ -0,0 +1,65 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson/bson-prelude.h" + + +#ifndef BSON_WRITER_H +#define BSON_WRITER_H + + +#include "bson/bson.h" + + +BSON_BEGIN_DECLS + + +/** + * bson_writer_t: + * + * The bson_writer_t structure is a helper for writing a series of BSON + * documents to a single malloc() buffer. You can provide a realloc() style + * function to grow the buffer as you go. + * + * This is useful if you want to build a series of BSON documents right into + * the target buffer for an outgoing packet. The offset parameter allows you to + * start at an offset of the target buffer. + */ +typedef struct _bson_writer_t bson_writer_t; + + +BSON_EXPORT (bson_writer_t *) +bson_writer_new (uint8_t **buf, + size_t *buflen, + size_t offset, + bson_realloc_func realloc_func, + void *realloc_func_ctx); +BSON_EXPORT (void) +bson_writer_destroy (bson_writer_t *writer); +BSON_EXPORT (size_t) +bson_writer_get_length (bson_writer_t *writer); +BSON_EXPORT (bool) +bson_writer_begin (bson_writer_t *writer, bson_t **bson); +BSON_EXPORT (void) +bson_writer_end (bson_writer_t *writer); +BSON_EXPORT (void) +bson_writer_rollback (bson_writer_t *writer); + + +BSON_END_DECLS + + +#endif /* BSON_WRITER_H */ diff --git a/include/libbson-1.0/bson/bson.h b/include/libbson-1.0/bson/bson.h new file mode 100644 index 0000000..d1ea20c --- /dev/null +++ b/include/libbson-1.0/bson/bson.h @@ -0,0 +1,1150 @@ +/* + * Copyright 2013 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef BSON_H
+#define BSON_H
+
+#define BSON_INSIDE
+
+#include "bson/bson-compat.h"
+
+#include <string.h>
+#include <time.h>
+
+#include "bson/bson-macros.h"
+#include "bson/bson-config.h"
+#include "bson/bson-atomic.h"
+#include "bson/bson-context.h"
+#include "bson/bson-clock.h"
+#include "bson/bson-decimal128.h"
+#include "bson/bson-error.h"
+#include "bson/bson-iter.h"
+#include "bson/bson-json.h"
+#include "bson/bson-keys.h"
+#include "bson/bson-md5.h"
+#include "bson/bson-memory.h"
+#include "bson/bson-oid.h"
+#include "bson/bson-reader.h"
+#include "bson/bson-string.h"
+#include "bson/bson-types.h"
+#include "bson/bson-utf8.h"
+#include "bson/bson-value.h"
+#include "bson/bson-version.h"
+#include "bson/bson-version-functions.h"
+#include "bson/bson-writer.h"
+#include "bson/bcon.h"
+
+#undef BSON_INSIDE
+
+
+BSON_BEGIN_DECLS
+
+
+/**
+ * bson_empty:
+ * @b: a bson_t.
+ *
+ * Checks to see if @b is an empty BSON document. An empty BSON document is
+ * a 5 byte document which contains the length (4 bytes) and a single NUL
+ * byte indicating end of fields.
+ */
+#define bson_empty(b) (((b)->len == 5) || !bson_get_data ((b))[4])
+
+
+/**
+ * bson_empty0:
+ *
+ * Like bson_empty() but treats NULL the same as an empty bson_t document.
+ */
+#define bson_empty0(b) (!(b) || bson_empty (b))
+
+
+/**
+ * bson_clear:
+ *
+ * Easily free a bson document and set it to NULL. Use like:
+ *
+ * bson_t *doc = bson_new();
+ * bson_clear (&doc);
+ * BSON_ASSERT (doc == NULL);
+ */
+#define bson_clear(bptr)            \
+   do {                             \
+      if (*(bptr)) {                \
+         bson_destroy (*(bptr));    \
+         *(bptr) = NULL;            \
+      }                             \
+   } while (0)
+
+
+/**
+ * BSON_MAX_SIZE:
+ *
+ * The maximum size in bytes of a BSON document.
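+ * (The macro evaluates to INT32_MAX, i.e. 2147483647 bytes.) An
+ * illustrative guard, with doc an assumed bson_t pointer:
+ *
+ *   if ((size_t) doc->len > BSON_MAX_SIZE) {
+ *      fprintf (stderr, "unexpectedly large BSON document\n");
+ *   }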
+ */ +#define BSON_MAX_SIZE ((size_t) ((1U << 31) - 1)) + + +#define BSON_APPEND_ARRAY(b, key, val) \ + bson_append_array (b, key, (int) strlen (key), val) + +#define BSON_APPEND_ARRAY_BEGIN(b, key, child) \ + bson_append_array_begin (b, key, (int) strlen (key), child) + +#define BSON_APPEND_BINARY(b, key, subtype, val, len) \ + bson_append_binary (b, key, (int) strlen (key), subtype, val, len) + +#define BSON_APPEND_BOOL(b, key, val) \ + bson_append_bool (b, key, (int) strlen (key), val) + +#define BSON_APPEND_CODE(b, key, val) \ + bson_append_code (b, key, (int) strlen (key), val) + +#define BSON_APPEND_CODE_WITH_SCOPE(b, key, val, scope) \ + bson_append_code_with_scope (b, key, (int) strlen (key), val, scope) + +#define BSON_APPEND_DBPOINTER(b, key, coll, oid) \ + bson_append_dbpointer (b, key, (int) strlen (key), coll, oid) + +#define BSON_APPEND_DOCUMENT_BEGIN(b, key, child) \ + bson_append_document_begin (b, key, (int) strlen (key), child) + +#define BSON_APPEND_DOUBLE(b, key, val) \ + bson_append_double (b, key, (int) strlen (key), val) + +#define BSON_APPEND_DOCUMENT(b, key, val) \ + bson_append_document (b, key, (int) strlen (key), val) + +#define BSON_APPEND_INT32(b, key, val) \ + bson_append_int32 (b, key, (int) strlen (key), val) + +#define BSON_APPEND_INT64(b, key, val) \ + bson_append_int64 (b, key, (int) strlen (key), val) + +#define BSON_APPEND_MINKEY(b, key) \ + bson_append_minkey (b, key, (int) strlen (key)) + +#define BSON_APPEND_DECIMAL128(b, key, val) \ + bson_append_decimal128 (b, key, (int) strlen (key), val) + +#define BSON_APPEND_MAXKEY(b, key) \ + bson_append_maxkey (b, key, (int) strlen (key)) + +#define BSON_APPEND_NULL(b, key) bson_append_null (b, key, (int) strlen (key)) + +#define BSON_APPEND_OID(b, key, val) \ + bson_append_oid (b, key, (int) strlen (key), val) + +#define BSON_APPEND_REGEX(b, key, val, opt) \ + bson_append_regex (b, key, (int) strlen (key), val, opt) + +#define BSON_APPEND_UTF8(b, key, val) \ + bson_append_utf8 (b, key, (int) strlen (key), val, (int) strlen (val)) + +#define BSON_APPEND_SYMBOL(b, key, val) \ + bson_append_symbol (b, key, (int) strlen (key), val, (int) strlen (val)) + +#define BSON_APPEND_TIME_T(b, key, val) \ + bson_append_time_t (b, key, (int) strlen (key), val) + +#define BSON_APPEND_TIMEVAL(b, key, val) \ + bson_append_timeval (b, key, (int) strlen (key), val) + +#define BSON_APPEND_DATE_TIME(b, key, val) \ + bson_append_date_time (b, key, (int) strlen (key), val) + +#define BSON_APPEND_TIMESTAMP(b, key, val, inc) \ + bson_append_timestamp (b, key, (int) strlen (key), val, inc) + +#define BSON_APPEND_UNDEFINED(b, key) \ + bson_append_undefined (b, key, (int) strlen (key)) + +#define BSON_APPEND_VALUE(b, key, val) \ + bson_append_value (b, key, (int) strlen (key), (val)) + + +/** + * bson_new: + * + * Allocates a new bson_t structure. Call the various bson_append_*() + * functions to add fields to the bson. You can iterate the bson_t at any + * time using a bson_iter_t and bson_iter_init(). + * + * Returns: A newly allocated bson_t that should be freed with bson_destroy(). + */ +BSON_EXPORT (bson_t *) +bson_new (void); + + +BSON_EXPORT (bson_t *) +bson_new_from_json (const uint8_t *data, ssize_t len, bson_error_t *error); + + +BSON_EXPORT (bool) +bson_init_from_json (bson_t *bson, + const char *data, + ssize_t len, + bson_error_t *error); + + +/** + * bson_init_static: + * @b: A pointer to a bson_t. + * @data: The data buffer to use. + * @length: The length of @data. + * + * Initializes a bson_t using @data and @length. 
This is ideal if you would + * like to use a stack allocation for your bson and do not need to grow the + * buffer. @data must be valid for the life of @b. + * + * Returns: true if initialized successfully; otherwise false. + */ +BSON_EXPORT (bool) +bson_init_static (bson_t *b, const uint8_t *data, size_t length); + + +/** + * bson_init: + * @b: A pointer to a bson_t. + * + * Initializes a bson_t for use. This function is useful to those that want a + * stack allocated bson_t. The usefulness of a stack allocated bson_t is + * marginal as the target buffer for content will still require heap + * allocations. It can help reduce heap fragmentation on allocators that do + * not employ SLAB/magazine semantics. + * + * You must call bson_destroy() with @b to release resources when you are done + * using @b. + */ +BSON_EXPORT (void) +bson_init (bson_t *b); + + +/** + * bson_reinit: + * @b: (inout): A bson_t. + * + * This is equivalent to calling bson_destroy() and bson_init() on a #bson_t. + * However, it will try to persist the existing malloc'd buffer if one exists. + * This is useful in cases where you want to reduce malloc overhead while + * building many documents. + */ +BSON_EXPORT (void) +bson_reinit (bson_t *b); + + +/** + * bson_new_from_data: + * @data: A buffer containing a serialized bson document. + * @length: The length of the document in bytes. + * + * Creates a new bson_t structure using the data provided. @data should contain + * at least @length bytes that can be copied into the new bson_t structure. + * + * Returns: A newly allocated bson_t that should be freed with bson_destroy(). + * If the first four bytes (little-endian) of data do not match @length, + * then NULL will be returned. + */ +BSON_EXPORT (bson_t *) +bson_new_from_data (const uint8_t *data, size_t length); + + +/** + * bson_new_from_buffer: + * @buf: A pointer to a buffer containing a serialized bson document. + * @buf_len: The length of the buffer in bytes. + * @realloc_fun: a realloc like function + * @realloc_fun_ctx: a context for the realloc function + * + * Creates a new bson_t structure using the data provided. @buf should contain + * a bson document, or null pointer should be passed for new allocations. + * + * Returns: A newly allocated bson_t that should be freed with bson_destroy(). + * The underlying buffer will be used and not be freed in destroy. + */ +BSON_EXPORT (bson_t *) +bson_new_from_buffer (uint8_t **buf, + size_t *buf_len, + bson_realloc_func realloc_func, + void *realloc_func_ctx); + + +/** + * bson_sized_new: + * @size: A size_t containing the number of bytes to allocate. + * + * This will allocate a new bson_t with enough bytes to hold a buffer + * sized @size. @size must be smaller than INT_MAX bytes. + * + * Returns: A newly allocated bson_t that should be freed with bson_destroy(). + */ +BSON_EXPORT (bson_t *) +bson_sized_new (size_t size); + + +/** + * bson_copy: + * @bson: A bson_t. + * + * Copies @bson into a newly allocated bson_t. You must call bson_destroy() + * when you are done with the resulting value to free its resources. + * + * Returns: A newly allocated bson_t that should be free'd with bson_destroy() + */ +BSON_EXPORT (bson_t *) +bson_copy (const bson_t *bson); + + +/** + * bson_copy_to: + * @src: The source bson_t. + * @dst: The destination bson_t. + * + * Initializes @dst and copies the content from @src into @dst. + */ +BSON_EXPORT (void) +bson_copy_to (const bson_t *src, bson_t *dst); + + +/** + * bson_copy_to_excluding: + * @src: A bson_t. 
+ * @dst: A bson_t to initialize and copy into. + * @first_exclude: First field name to exclude. + * + * Copies @src into @dst excluding any field that is provided. + * This is handy for situations when you need to remove one or + * more fields in a bson_t. Note that bson_init() will be called + * on dst. + */ +BSON_EXPORT (void) +bson_copy_to_excluding (const bson_t *src, + bson_t *dst, + const char *first_exclude, + ...) BSON_GNUC_NULL_TERMINATED + BSON_GNUC_DEPRECATED_FOR (bson_copy_to_excluding_noinit); + +/** + * bson_copy_to_excluding_noinit: + * @src: A bson_t. + * @dst: A bson_t to initialize and copy into. + * @first_exclude: First field name to exclude. + * + * The same as bson_copy_to_excluding, but does not call bson_init() + * on the dst. This version should be preferred in new code, but the + * old function is left for backwards compatibility. + */ +BSON_EXPORT (void) +bson_copy_to_excluding_noinit (const bson_t *src, + bson_t *dst, + const char *first_exclude, + ...) BSON_GNUC_NULL_TERMINATED; + +BSON_EXPORT (void) +bson_copy_to_excluding_noinit_va (const bson_t *src, + bson_t *dst, + const char *first_exclude, + va_list args); + +/** + * bson_destroy: + * @bson: A bson_t. + * + * Frees the resources associated with @bson. + */ +BSON_EXPORT (void) +bson_destroy (bson_t *bson); + +BSON_EXPORT (uint8_t *) +bson_reserve_buffer (bson_t *bson, uint32_t size); + +BSON_EXPORT (bool) +bson_steal (bson_t *dst, bson_t *src); + + +/** + * bson_destroy_with_steal: + * @bson: A #bson_t. + * @steal: If ownership of the data buffer should be transferred to caller. + * @length: (out): location for the length of the buffer. + * + * Destroys @bson similar to calling bson_destroy() except that the underlying + * buffer will be returned and ownership transferred to the caller if @steal + * is non-zero. + * + * If length is non-NULL, the length of @bson will be stored in @length. + * + * It is a programming error to call this function with any bson that has + * been initialized static, or is being used to create a subdocument with + * functions such as bson_append_document_begin() or bson_append_array_begin(). + * + * Returns: a buffer owned by the caller if @steal is true. Otherwise NULL. + * If there was an error, NULL is returned. + */ +BSON_EXPORT (uint8_t *) +bson_destroy_with_steal (bson_t *bson, bool steal, uint32_t *length); + + +/** + * bson_get_data: + * @bson: A bson_t. + * + * Fetched the data buffer for @bson of @bson->len bytes in length. + * + * Returns: A buffer that should not be modified or freed. + */ +BSON_EXPORT (const uint8_t *) +bson_get_data (const bson_t *bson); + + +/** + * bson_count_keys: + * @bson: A bson_t. + * + * Counts the number of elements found in @bson. + */ +BSON_EXPORT (uint32_t) +bson_count_keys (const bson_t *bson); + + +/** + * bson_has_field: + * @bson: A bson_t. + * @key: The key to lookup. + * + * Checks to see if @bson contains a field named @key. + * + * This function is case-sensitive. + * + * Returns: true if @key exists in @bson; otherwise false. + */ +BSON_EXPORT (bool) +bson_has_field (const bson_t *bson, const char *key); + + +/** + * bson_compare: + * @bson: A bson_t. + * @other: A bson_t. + * + * Compares @bson to @other in a qsort() style comparison. + * See qsort() for information on how this function works. + * + * Returns: Less than zero, zero, or greater than zero. + */ +BSON_EXPORT (int) +bson_compare (const bson_t *bson, const bson_t *other); + +/* + * bson_compare: + * @bson: A bson_t. + * @other: A bson_t. 
+ * + * Checks to see if @bson and @other are equal. + * + * Returns: true if equal; otherwise false. + */ +BSON_EXPORT (bool) +bson_equal (const bson_t *bson, const bson_t *other); + + +/** + * bson_validate: + * @bson: A bson_t. + * @offset: A location for the error offset. + * + * Validates a BSON document by walking through the document and inspecting + * the fields for valid content. + * + * Returns: true if @bson is valid; otherwise false and @offset is set. + */ +BSON_EXPORT (bool) +bson_validate (const bson_t *bson, bson_validate_flags_t flags, size_t *offset); + + +/** + * bson_validate_with_error: + * @bson: A bson_t. + * @error: A location for the error info. + * + * Validates a BSON document by walking through the document and inspecting + * the fields for valid content. + * + * Returns: true if @bson is valid; otherwise false and @error is filled out. + */ +BSON_EXPORT (bool) +bson_validate_with_error (const bson_t *bson, + bson_validate_flags_t flags, + bson_error_t *error); + + +/** + * bson_as_canonical_extended_json: + * @bson: A bson_t. + * @length: A location for the string length, or NULL. + * + * Creates a new string containing @bson in canonical extended JSON format, + * conforming to the MongoDB Extended JSON Spec: + * + * github.com/mongodb/specifications/blob/master/source/extended-json.rst + * + * The caller is responsible for freeing the resulting string. If @length is + * non-NULL, then the length of the resulting string will be placed in @length. + * + * See http://docs.mongodb.org/manual/reference/mongodb-extended-json/ for + * more information on extended JSON. + * + * Returns: A newly allocated string that should be freed with bson_free(). + */ +BSON_EXPORT (char *) +bson_as_canonical_extended_json (const bson_t *bson, size_t *length); + + +/** + * bson_as_json: + * @bson: A bson_t. + * @length: A location for the string length, or NULL. + * + * Creates a new string containing @bson in libbson's legacy JSON format. + * Superseded by bson_as_canonical_extended_json and + * bson_as_relaxed_extended_json. The caller is + * responsible for freeing the resulting string. If @length is non-NULL, then + * the length of the resulting string will be placed in @length. + * + * Returns: A newly allocated string that should be freed with bson_free(). + */ +BSON_EXPORT (char *) +bson_as_json (const bson_t *bson, size_t *length); + + +/** + * bson_as_relaxed_extended_json: + * @bson: A bson_t. + * @length: A location for the string length, or NULL. + * + * Creates a new string containing @bson in relaxed extended JSON format, + * conforming to the MongoDB Extended JSON Spec: + * + * github.com/mongodb/specifications/blob/master/source/extended-json.rst + * + * The caller is responsible for freeing the resulting string. If @length is + * non-NULL, then the length of the resulting string will be placed in @length. + * + * See http://docs.mongodb.org/manual/reference/mongodb-extended-json/ for + * more information on extended JSON. + * + * Returns: A newly allocated string that should be freed with bson_free(). + */ +BSON_EXPORT (char *) +bson_as_relaxed_extended_json (const bson_t *bson, size_t *length); + + +/* like bson_as_json() but for outermost arrays. */ +BSON_EXPORT (char *) +bson_array_as_json (const bson_t *bson, size_t *length); + + +BSON_EXPORT (bool) +bson_append_value (bson_t *bson, + const char *key, + int key_length, + const bson_value_t *value); + + +/** + * bson_append_array: + * @bson: A bson_t. + * @key: The key for the field. 
+ * @array: A bson_t containing the array. + * + * Appends a BSON array to @bson. BSON arrays are like documents where the + * key is the string version of the index. For example, the first item of the + * array would have the key "0". The second item would have the index "1". + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_array (bson_t *bson, + const char *key, + int key_length, + const bson_t *array); + + +/** + * bson_append_binary: + * @bson: A bson_t to append. + * @key: The key for the field. + * @subtype: The bson_subtype_t of the binary. + * @binary: The binary buffer to append. + * @length: The length of @binary. + * + * Appends a binary buffer to the BSON document. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_binary (bson_t *bson, + const char *key, + int key_length, + bson_subtype_t subtype, + const uint8_t *binary, + uint32_t length); + + +/** + * bson_append_bool: + * @bson: A bson_t. + * @key: The key for the field. + * @value: The boolean value. + * + * Appends a new field to @bson of type BSON_TYPE_BOOL. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_bool (bson_t *bson, const char *key, int key_length, bool value); + + +/** + * bson_append_code: + * @bson: A bson_t. + * @key: The key for the document. + * @javascript: JavaScript code to be executed. + * + * Appends a field of type BSON_TYPE_CODE to the BSON document. @javascript + * should contain a script in javascript to be executed. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_code (bson_t *bson, + const char *key, + int key_length, + const char *javascript); + + +/** + * bson_append_code_with_scope: + * @bson: A bson_t. + * @key: The key for the document. + * @javascript: JavaScript code to be executed. + * @scope: A bson_t containing the scope for @javascript. + * + * Appends a field of type BSON_TYPE_CODEWSCOPE to the BSON document. + * @javascript should contain a script in javascript to be executed. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_code_with_scope (bson_t *bson, + const char *key, + int key_length, + const char *javascript, + const bson_t *scope); + + +/** + * bson_append_dbpointer: + * @bson: A bson_t. + * @key: The key for the field. + * @collection: The collection name. + * @oid: The oid to the reference. + * + * Appends a new field of type BSON_TYPE_DBPOINTER. This datum type is + * deprecated in the BSON spec and should not be used in new code. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_dbpointer (bson_t *bson, + const char *key, + int key_length, + const char *collection, + const bson_oid_t *oid); + + +/** + * bson_append_double: + * @bson: A bson_t. + * @key: The key for the field. + * + * Appends a new field to @bson of the type BSON_TYPE_DOUBLE. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_double (bson_t *bson, + const char *key, + int key_length, + double value); + + +/** + * bson_append_document: + * @bson: A bson_t. + * @key: The key for the field. + * @value: A bson_t containing the subdocument. + * + * Appends a new field to @bson of the type BSON_TYPE_DOCUMENT. 
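/* [Editorial usage sketch, not part of the original header: a hypothetical
 * example of bson_append_array(). As the comment above notes, the keys of the
 * array document must be the decimal string form of each index.]
 *
 *   bson_t doc, tags;
 *
 *   bson_init (&doc);
 *   bson_init (&tags);
 *   bson_append_utf8 (&tags, "0", -1, "seismology", -1);
 *   bson_append_utf8 (&tags, "1", -1, "miniseed", -1);
 *   bson_append_array (&doc, "tags", -1, &tags);   // copies the array into doc
 *   bson_destroy (&tags);
 *   bson_destroy (&doc);
 */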
+ * The documents contents will be copied into @bson. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_document (bson_t *bson, + const char *key, + int key_length, + const bson_t *value); + + +/** + * bson_append_document_begin: + * @bson: A bson_t. + * @key: The key for the field. + * @key_length: The length of @key in bytes not including NUL or -1 + * if @key_length is NUL terminated. + * @child: A location to an uninitialized bson_t. + * + * Appends a new field named @key to @bson. The field is, however, + * incomplete. @child will be initialized so that you may add fields to the + * child document. Child will use a memory buffer owned by @bson and + * therefore grow the parent buffer as additional space is used. This allows + * a single malloc'd buffer to be used when building documents which can help + * reduce memory fragmentation. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_document_begin (bson_t *bson, + const char *key, + int key_length, + bson_t *child); + + +/** + * bson_append_document_end: + * @bson: A bson_t. + * @child: A bson_t supplied to bson_append_document_begin(). + * + * Finishes the appending of a document to a @bson. @child is considered + * disposed after this call and should not be used any further. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_document_end (bson_t *bson, bson_t *child); + + +/** + * bson_append_array_begin: + * @bson: A bson_t. + * @key: The key for the field. + * @key_length: The length of @key in bytes not including NUL or -1 + * if @key_length is NUL terminated. + * @child: A location to an uninitialized bson_t. + * + * Appends a new field named @key to @bson. The field is, however, + * incomplete. @child will be initialized so that you may add fields to the + * child array. Child will use a memory buffer owned by @bson and + * therefore grow the parent buffer as additional space is used. This allows + * a single malloc'd buffer to be used when building arrays which can help + * reduce memory fragmentation. + * + * The type of @child will be BSON_TYPE_ARRAY and therefore the keys inside + * of it MUST be "0", "1", etc. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_array_begin (bson_t *bson, + const char *key, + int key_length, + bson_t *child); + + +/** + * bson_append_array_end: + * @bson: A bson_t. + * @child: A bson_t supplied to bson_append_array_begin(). + * + * Finishes the appending of a array to a @bson. @child is considered + * disposed after this call and should not be used any further. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_array_end (bson_t *bson, bson_t *child); + + +/** + * bson_append_int32: + * @bson: A bson_t. + * @key: The key for the field. + * @value: The int32_t 32-bit integer value. + * + * Appends a new field of type BSON_TYPE_INT32 to @bson. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_int32 (bson_t *bson, + const char *key, + int key_length, + int32_t value); + + +/** + * bson_append_int64: + * @bson: A bson_t. + * @key: The key for the field. + * @value: The int64_t 64-bit integer value. + * + * Appends a new field of type BSON_TYPE_INT64 to @bson. 
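/* [Editorial usage sketch, not part of the original header: a hypothetical
 * example of the begin/end pairs declared above. The child document or array
 * is built directly in the parent's buffer, so no separate copy is made.]
 *
 *   bson_t doc, child;
 *
 *   bson_init (&doc);
 *
 *   bson_append_document_begin (&doc, "station", -1, &child);
 *   bson_append_utf8 (&child, "code", -1, "APE", -1);
 *   bson_append_document_end (&doc, &child);        // child is disposed here
 *
 *   bson_append_array_begin (&doc, "samples", -1, &child);
 *   bson_append_int32 (&child, "0", -1, 10);         // array keys must be "0", "1", ...
 *   bson_append_int32 (&child, "1", -1, 20);
 *   bson_append_array_end (&doc, &child);
 *
 *   bson_destroy (&doc);
 */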
+ * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_int64 (bson_t *bson, + const char *key, + int key_length, + int64_t value); + + +/** + * bson_append_decimal128: + * @bson: A bson_t. + * @key: The key for the field. + * @value: The bson_decimal128_t decimal128 value. + * + * Appends a new field of type BSON_TYPE_DECIMAL128 to @bson. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_decimal128 (bson_t *bson, + const char *key, + int key_length, + const bson_decimal128_t *value); + + +/** + * bson_append_iter: + * @bson: A bson_t to append to. + * @key: The key name or %NULL to take current key from @iter. + * @key_length: The key length or -1 to use strlen(). + * @iter: The iter located on the position of the element to append. + * + * Appends a new field to @bson that is equivalent to the field currently + * pointed to by @iter. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_iter (bson_t *bson, + const char *key, + int key_length, + const bson_iter_t *iter); + + +/** + * bson_append_minkey: + * @bson: A bson_t. + * @key: The key for the field. + * + * Appends a new field of type BSON_TYPE_MINKEY to @bson. This is a special + * type that compares lower than all other possible BSON element values. + * + * See http://bsonspec.org for more information on this type. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_minkey (bson_t *bson, const char *key, int key_length); + + +/** + * bson_append_maxkey: + * @bson: A bson_t. + * @key: The key for the field. + * + * Appends a new field of type BSON_TYPE_MAXKEY to @bson. This is a special + * type that compares higher than all other possible BSON element values. + * + * See http://bsonspec.org for more information on this type. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_maxkey (bson_t *bson, const char *key, int key_length); + + +/** + * bson_append_null: + * @bson: A bson_t. + * @key: The key for the field. + * + * Appends a new field to @bson with NULL for the value. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_null (bson_t *bson, const char *key, int key_length); + + +/** + * bson_append_oid: + * @bson: A bson_t. + * @key: The key for the field. + * @oid: bson_oid_t. + * + * Appends a new field to the @bson of type BSON_TYPE_OID using the contents of + * @oid. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_oid (bson_t *bson, + const char *key, + int key_length, + const bson_oid_t *oid); + + +/** + * bson_append_regex: + * @bson: A bson_t. + * @key: The key of the field. + * @regex: The regex to append to the bson. + * @options: Options for @regex. + * + * Appends a new field to @bson of type BSON_TYPE_REGEX. @regex should + * be the regex string. @options should contain the options for the regex. + * + * Valid options for @options are: + * + * 'i' for case-insensitive. + * 'm' for multiple matching. + * 'x' for verbose mode. + * 'l' to make \w and \W locale dependent. + * 's' for dotall mode ('.' matches everything) + * 'u' to make \w and \W match unicode. + * + * For more detailed information about BSON regex elements, see bsonspec.org. 
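/* [Editorial usage sketch, not part of the original header: a hypothetical
 * example of bson_append_iter(), copying a single field from src to dst
 * (both bson_t pointers) without caring about its type. bson_iter_init_find()
 * comes from libbson's iterator API, which is not shown in this excerpt.]
 *
 *   bson_iter_t iter;
 *
 *   if (bson_iter_init_find (&iter, src, "starttime")) {
 *      // NULL key and -1 length reuse the key the iterator currently points at
 *      bson_append_iter (dst, NULL, -1, &iter);
 *   }
 */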
+ *
+ * Returns: true if successful; false if append would overflow max size.
+ */
+BSON_EXPORT (bool)
+bson_append_regex (bson_t *bson,
+                   const char *key,
+                   int key_length,
+                   const char *regex,
+                   const char *options);
+
+
+/**
+ * bson_append_regex_w_len:
+ * @bson: A bson_t.
+ * @key: The key of the field.
+ * @key_length: The length of the key string.
+ * @regex: The regex to append to the bson.
+ * @regex_length: The length of the regex string.
+ * @options: Options for @regex.
+ *
+ * Appends a new field to @bson of type BSON_TYPE_REGEX. @regex should
+ * be the regex string. @options should contain the options for the regex.
+ *
+ * Valid options for @options are:
+ *
+ *   'i' for case-insensitive.
+ *   'm' for multiple matching.
+ *   'x' for verbose mode.
+ *   'l' to make \w and \W locale dependent.
+ *   's' for dotall mode ('.' matches everything)
+ *   'u' to make \w and \W match unicode.
+ *
+ * For more detailed information about BSON regex elements, see bsonspec.org.
+ *
+ * Returns: true if successful; false if append would overflow max size.
+ */
+BSON_EXPORT (bool)
+bson_append_regex_w_len (bson_t *bson,
+                         const char *key,
+                         int key_length,
+                         const char *regex,
+                         int regex_length,
+                         const char *options);
+
+
+/**
+ * bson_append_utf8:
+ * @bson: A bson_t.
+ * @key: The key for the field.
+ * @value: A UTF-8 encoded string.
+ * @length: The length of @value or -1 if it is NUL terminated.
+ *
+ * Appends a new field to @bson using @key as the key and @value as the UTF-8
+ * encoded value.
+ *
+ * It is the caller's responsibility to ensure @value is valid UTF-8. You can
+ * use bson_utf8_validate() to perform this check.
+ *
+ * Returns: true if successful; false if append would overflow max size.
+ */
+BSON_EXPORT (bool)
+bson_append_utf8 (bson_t *bson,
+                  const char *key,
+                  int key_length,
+                  const char *value,
+                  int length);
+
+
+/**
+ * bson_append_symbol:
+ * @bson: A bson_t.
+ * @key: The key for the field.
+ * @value: The symbol as a string.
+ * @length: The length of @value or -1 if NUL-terminated.
+ *
+ * Appends a new field to @bson of type BSON_TYPE_SYMBOL. This BSON type is
+ * deprecated and should not be used in new code.
+ *
+ * See http://bsonspec.org for more information on this type.
+ *
+ * Returns: true if successful; false if append would overflow max size.
+ */
+BSON_EXPORT (bool)
+bson_append_symbol (bson_t *bson,
+                    const char *key,
+                    int key_length,
+                    const char *value,
+                    int length);
+
+
+/**
+ * bson_append_time_t:
+ * @bson: A bson_t.
+ * @key: The key for the field.
+ * @value: A time_t.
+ *
+ * Appends a BSON_TYPE_DATE_TIME field to @bson using the time_t @value for the
+ * number of seconds since UNIX epoch in UTC.
+ *
+ * Returns: true if successful; false if append would overflow max size.
+ */
+BSON_EXPORT (bool)
+bson_append_time_t (bson_t *bson,
+                    const char *key,
+                    int key_length,
+                    time_t value);
+
+
+/**
+ * bson_append_timeval:
+ * @bson: A bson_t.
+ * @key: The key for the field.
+ * @value: A struct timeval containing the date and time.
+ *
+ * Appends a BSON_TYPE_DATE_TIME field to @bson using the struct timeval
+ * provided. The time is persisted in milliseconds since the UNIX epoch in UTC.
+ *
+ * Returns: true if successful; false if append would overflow max size.
+ */
+BSON_EXPORT (bool)
+bson_append_timeval (bson_t *bson,
+                     const char *key,
+                     int key_length,
+                     struct timeval *value);
+
+
+/**
+ * bson_append_date_time:
+ * @bson: A bson_t.
+ * @key: The key for the field.
+ * @key_length: The length of @key in bytes or -1 if \0 terminated.
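/* [Editorial usage sketch, not part of the original header: a hypothetical
 * example of the date/time appenders. bson_append_time_t() takes whole
 * seconds, while bson_append_date_time(), declared just below, takes
 * milliseconds; both store a BSON_TYPE_DATE_TIME value in UTC. doc is an
 * already-initialized bson_t pointer and <time.h> is assumed.]
 *
 *   time_t now = time (NULL);
 *
 *   bson_append_time_t (doc, "created", -1, now);
 *   bson_append_date_time (doc, "expires", -1, ((int64_t) now + 3600) * 1000);
 */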
+ * @value: The number of milliseconds elapsed since UNIX epoch. + * + * Appends a new field to @bson of type BSON_TYPE_DATE_TIME. + * + * Returns: true if successful; otherwise false. + */ +BSON_EXPORT (bool) +bson_append_date_time (bson_t *bson, + const char *key, + int key_length, + int64_t value); + + +/** + * bson_append_now_utc: + * @bson: A bson_t. + * @key: The key for the field. + * @key_length: The length of @key or -1 if it is NULL terminated. + * + * Appends a BSON_TYPE_DATE_TIME field to @bson using the current time in UTC + * as the field value. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_now_utc (bson_t *bson, const char *key, int key_length); + +/** + * bson_append_timestamp: + * @bson: A bson_t. + * @key: The key for the field. + * @timestamp: 4 byte timestamp. + * @increment: 4 byte increment for timestamp. + * + * Appends a field of type BSON_TYPE_TIMESTAMP to @bson. This is a special type + * used by MongoDB replication and sharding. If you need generic time and date + * fields use bson_append_time_t() or bson_append_timeval(). + * + * Setting @increment and @timestamp to zero has special semantics. See + * http://bsonspec.org for more information on this field type. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_timestamp (bson_t *bson, + const char *key, + int key_length, + uint32_t timestamp, + uint32_t increment); + + +/** + * bson_append_undefined: + * @bson: A bson_t. + * @key: The key for the field. + * + * Appends a field of type BSON_TYPE_UNDEFINED. This type is deprecated in the + * spec and should not be used for new code. However, it is provided for those + * needing to interact with legacy systems. + * + * Returns: true if successful; false if append would overflow max size. + */ +BSON_EXPORT (bool) +bson_append_undefined (bson_t *bson, const char *key, int key_length); + + +BSON_EXPORT (bool) +bson_concat (bson_t *dst, const bson_t *src); + + +BSON_END_DECLS + + +#endif /* BSON_H */ diff --git a/include/libmseed.h b/include/libmseed.h new file mode 100644 index 0000000..5d1aea2 --- /dev/null +++ b/include/libmseed.h @@ -0,0 +1,838 @@ +/*************************************************************************** + * libmseed.h: + * + * Interface declarations for the Mini-SEED library (libmseed). + * + * This library is free software; you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 3 of the + * License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License (GNU-LGPL) for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this software. + * If not, see . 
+ * + * Copyright (C) 2017 Chad Trabant + * IRIS Data Management Center + ***************************************************************************/ + +#ifndef LIBMSEED_H +#define LIBMSEED_H 1 + +#ifdef __cplusplus +extern "C" { +#endif + +#define LIBMSEED_VERSION "2.19.6" +#define LIBMSEED_RELEASE "2018.240" + +/* C99 standard headers */ +#include +#include +#include +#include +#include +#include + +/* This library uses structs that map to SEED header/blockette + structures that are required to have a layout exactly as specified, + i.e. no padding. + + If "ATTRIBUTE_PACKED" is defined at compile time (e.g. -DATTRIBUTE_PACKED) + the preprocessor will use the define below to add the "packed" attribute + to effected structs. This attribute is supported by GCC and increasingly + more compilers. + */ +#if defined(ATTRIBUTE_PACKED) + #define LMP_PACKED __attribute__((packed)) +#else + #define LMP_PACKED +#endif + +/* Set platform specific defines */ +#if defined(__linux__) || defined(__linux) || defined(__CYGWIN__) + #define LMP_LINUX 1 + #define LMP_GLIBC2 1 /* Deprecated */ +#elif defined(__APPLE__) || defined(__FreeBSD__) || defined(__OpenBSD__) || defined(__NetBSD__) + #define LMP_BSD 1 +#elif defined(__sun__) || defined(__sun) + #define LMP_SOLARIS 1 +#elif defined(WIN32) || defined(_WIN32) || defined(WIN64) || defined(_WIN64) + #define LMP_WIN 1 + #define LMP_WIN32 1 /* Deprecated */ +#endif + +/* Set platform specific features */ +#if defined(LMP_WIN) + #include + #include + + /* For MSVC 2012 and earlier define standard int types, otherwise use inttypes.h */ + #if defined(_MSC_VER) && _MSC_VER <= 1700 + typedef signed char int8_t; + typedef unsigned char uint8_t; + typedef signed short int int16_t; + typedef unsigned short int uint16_t; + typedef signed int int32_t; + typedef unsigned int uint32_t; + typedef signed __int64 int64_t; + typedef unsigned __int64 uint64_t; + #else + #include + #endif + + /* For MSVC define PRId64 and SCNd64 if needed */ + #if defined(_MSC_VER) + #if !defined(PRId64) + #define PRId64 "I64d" + #endif + #if !defined(SCNd64) + #define SCNd64 "I64d" + #endif + + #define snprintf _snprintf + #define vsnprintf _vsnprintf + #define strcasecmp _stricmp + #define strncasecmp _strnicmp + #define strtoull _strtoui64 + #define strdup _strdup + #define fileno _fileno + #endif + + /* Extras needed for MinGW */ + #if defined(__MINGW32__) || defined(__MINGW64__) + #include + + #define fstat _fstat + #define stat _stat + #endif +#else + #include + #include +#endif + +extern int LM_SIZEOF_OFF_T; /* Size of off_t data type determined at build time */ + +#define MINRECLEN 128 /* Minimum Mini-SEED record length, 2^7 bytes */ + /* Note: the SEED specification minimum is 256 */ +#define MAXRECLEN 1048576 /* Maximum Mini-SEED record length, 2^20 bytes */ + +/* SEED data encoding types */ +#define DE_ASCII 0 +#define DE_INT16 1 +#define DE_INT32 3 +#define DE_FLOAT32 4 +#define DE_FLOAT64 5 +#define DE_STEIM1 10 +#define DE_STEIM2 11 +#define DE_GEOSCOPE24 12 +#define DE_GEOSCOPE163 13 +#define DE_GEOSCOPE164 14 +#define DE_CDSN 16 +#define DE_SRO 30 +#define DE_DWWSSN 32 + +/* Library return and error code values, error values should always be negative */ +#define MS_ENDOFFILE 1 /* End of file reached return value */ +#define MS_NOERROR 0 /* No error */ +#define MS_GENERROR -1 /* Generic unspecified error */ +#define MS_NOTSEED -2 /* Data not SEED */ +#define MS_WRONGLENGTH -3 /* Length of data read was not correct */ +#define MS_OUTOFRANGE -4 /* SEED record length out of range */ 
+#define MS_UNKNOWNFORMAT -5 /* Unknown data encoding format */ +#define MS_STBADCOMPFLAG -6 /* Steim, invalid compression flag(s) */ + +/* Define the high precision time tick interval as 1/modulus seconds */ +/* Default modulus of 1000000 defines tick interval as a microsecond */ +#define HPTMODULUS 1000000 + +/* Error code for routines that normally return a high precision time. + * The time value corresponds to '1902/1/1 00:00:00.000000' with the + * default HPTMODULUS */ +#define HPTERROR -2145916800000000LL + +/* Macros to scale between Unix/POSIX epoch time & high precision time */ +#define MS_EPOCH2HPTIME(X) X * (hptime_t) HPTMODULUS +#define MS_HPTIME2EPOCH(X) X / HPTMODULUS + +/* Macro to test a character for data record indicators */ +#define MS_ISDATAINDICATOR(X) (X=='D' || X=='R' || X=='Q' || X=='M') + +/* Macro to test default sample rate tolerance: abs(1-sr1/sr2) < 0.0001 */ +#define MS_ISRATETOLERABLE(A,B) (ms_dabs (1.0 - (A / B)) < 0.0001) + +/* Macro to test for sane year and day values, used primarily to + * determine if byte order swapping is needed. + * + * Year : between 1900 and 2100 + * Day : between 1 and 366 + * + * This test is non-unique (non-deterministic) for days 1, 256 and 257 + * in the year 2056 because the swapped values are also within range. + */ +#define MS_ISVALIDYEARDAY(Y,D) (Y >= 1900 && Y <= 2100 && D >= 1 && D <= 366) + +/* Macro to test memory for a SEED data record signature by checking + * SEED data record header values at known byte offsets to determine + * if the memory contains a valid record. + * + * Offset = Value + * [0-5] = Digits, spaces or NULL, SEED sequence number + * 6 = Data record quality indicator + * 7 = Space or NULL [not valid SEED] + * 24 = Start hour (0-23) + * 25 = Start minute (0-59) + * 26 = Start second (0-60) + * + * Usage: + * MS_ISVALIDHEADER ((char *)X) X buffer must contain at least 27 bytes + */ +#define MS_ISVALIDHEADER(X) ( \ + (isdigit ((int) *(X)) || *(X) == ' ' || !*(X) ) && \ + (isdigit ((int) *(X+1)) || *(X+1) == ' ' || !*(X+1) ) && \ + (isdigit ((int) *(X+2)) || *(X+2) == ' ' || !*(X+2) ) && \ + (isdigit ((int) *(X+3)) || *(X+3) == ' ' || !*(X+3) ) && \ + (isdigit ((int) *(X+4)) || *(X+4) == ' ' || !*(X+4) ) && \ + (isdigit ((int) *(X+5)) || *(X+5) == ' ' || !*(X+5) ) && \ + MS_ISDATAINDICATOR(*(X+6)) && \ + (*(X+7) == ' ' || *(X+7) == '\0') && \ + (int)(*(X+24)) >= 0 && (int)(*(X+24)) <= 23 && \ + (int)(*(X+25)) >= 0 && (int)(*(X+25)) <= 59 && \ + (int)(*(X+26)) >= 0 && (int)(*(X+26)) <= 60 ) + +/* Macro to test memory for a blank/noise SEED data record signature + * by checking for a valid SEED sequence number and padding characters + * to determine if the memory contains a valid blank/noise record. 
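/* [Editorial usage sketch, not part of the original header: a hypothetical,
 * minimal example tying the codes and macros above together. It parses one
 * raw record with msr_parse(), which is declared further down in this header,
 * checks the return value against the MS_* codes, and converts the hptime_t
 * start time to a Unix epoch with MS_HPTIME2EPOCH(). rawbuf/rawlen, stdio and
 * time includes, and cleanup of the unpacked data are assumed or elided.]
 *
 *   MSRecord *msr = NULL;
 *   int rv;
 *
 *   rv = msr_parse (rawbuf, rawlen, &msr, -1, 1, 0);   // negative reclen asks the
 *                                                      // library to detect the length
 *   if (rv == MS_NOERROR) {
 *      time_t start = (time_t) MS_HPTIME2EPOCH (msr->starttime);
 *      printf ("%s: %lld samples, start %s", msr->channel,
 *              (long long) msr->samplecnt, ctime (&start));
 *   } else if (rv > 0) {
 *      // a record was detected but rv more bytes are needed
 *   } else {
 *      fprintf (stderr, "parse error: %s\n", ms_errorstr (rv));
 *   }
 *
 *   msr_free (&msr);
 */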
+ * + * Offset = Value + * [0-5] = Digits or NULL, SEED sequence number + * [6-47] = Space character (ASCII 32), remainder of fixed header + * + * Usage: + * MS_ISVALIDBLANK ((char *)X) X buffer must contain at least 27 bytes + */ +#define MS_ISVALIDBLANK(X) ( \ + (isdigit ((int) *(X)) || !*(X) ) && \ + (isdigit ((int) *(X+1)) || !*(X+1) ) && \ + (isdigit ((int) *(X+2)) || !*(X+2) ) && \ + (isdigit ((int) *(X+3)) || !*(X+3) ) && \ + (isdigit ((int) *(X+4)) || !*(X+4) ) && \ + (isdigit ((int) *(X+5)) || !*(X+5) ) && \ + (*(X+6) ==' ') && (*(X+7) ==' ') && (*(X+8) ==' ') && \ + (*(X+9) ==' ') && (*(X+10)==' ') && (*(X+11)==' ') && \ + (*(X+12)==' ') && (*(X+13)==' ') && (*(X+14)==' ') && \ + (*(X+15)==' ') && (*(X+16)==' ') && (*(X+17)==' ') && \ + (*(X+18)==' ') && (*(X+19)==' ') && (*(X+20)==' ') && \ + (*(X+21)==' ') && (*(X+22)==' ') && (*(X+23)==' ') && \ + (*(X+24)==' ') && (*(X+25)==' ') && (*(X+26)==' ') && \ + (*(X+27)==' ') && (*(X+28)==' ') && (*(X+29)==' ') && \ + (*(X+30)==' ') && (*(X+31)==' ') && (*(X+32)==' ') && \ + (*(X+33)==' ') && (*(X+34)==' ') && (*(X+35)==' ') && \ + (*(X+36)==' ') && (*(X+37)==' ') && (*(X+38)==' ') && \ + (*(X+39)==' ') && (*(X+40)==' ') && (*(X+41)==' ') && \ + (*(X+42)==' ') && (*(X+43)==' ') && (*(X+44)==' ') && \ + (*(X+45)==' ') && (*(X+46)==' ') && (*(X+47)==' ') ) + +/* A simple bitwise AND test to return 0 or 1 */ +#define bit(x,y) (x&y)?1:0 + +/* Require a large (>= 64-bit) integer type for hptime_t */ +typedef int64_t hptime_t; + +/* A single byte flag type */ +typedef int8_t flag; + +/* SEED binary time */ +typedef struct btime_s +{ + uint16_t year; + uint16_t day; + uint8_t hour; + uint8_t min; + uint8_t sec; + uint8_t unused; + uint16_t fract; +} LMP_PACKED +BTime; + +/* Fixed section data of header */ +struct fsdh_s +{ + char sequence_number[6]; + char dataquality; + char reserved; + char station[5]; + char location[2]; + char channel[3]; + char network[2]; + BTime start_time; + uint16_t numsamples; + int16_t samprate_fact; + int16_t samprate_mult; + uint8_t act_flags; + uint8_t io_flags; + uint8_t dq_flags; + uint8_t numblockettes; + int32_t time_correct; + uint16_t data_offset; + uint16_t blockette_offset; +} LMP_PACKED; + +/* Blockette 100, Sample Rate (without header) */ +struct blkt_100_s +{ + float samprate; + int8_t flags; + uint8_t reserved[3]; +} LMP_PACKED; + +/* Blockette 200, Generic Event Detection (without header) */ +struct blkt_200_s +{ + float amplitude; + float period; + float background_estimate; + uint8_t flags; + uint8_t reserved; + BTime time; + char detector[24]; +} LMP_PACKED; + +/* Blockette 201, Murdock Event Detection (without header) */ +struct blkt_201_s +{ + float amplitude; + float period; + float background_estimate; + uint8_t flags; + uint8_t reserved; + BTime time; + uint8_t snr_values[6]; + uint8_t loopback; + uint8_t pick_algorithm; + char detector[24]; +} LMP_PACKED; + +/* Blockette 300, Step Calibration (without header) */ +struct blkt_300_s +{ + BTime time; + uint8_t numcalibrations; + uint8_t flags; + uint32_t step_duration; + uint32_t interval_duration; + float amplitude; + char input_channel[3]; + uint8_t reserved; + uint32_t reference_amplitude; + char coupling[12]; + char rolloff[12]; +} LMP_PACKED; + +/* Blockette 310, Sine Calibration (without header) */ +struct blkt_310_s +{ + BTime time; + uint8_t reserved1; + uint8_t flags; + uint32_t duration; + float period; + float amplitude; + char input_channel[3]; + uint8_t reserved2; + uint32_t reference_amplitude; + char coupling[12]; + char 
rolloff[12]; +} LMP_PACKED; + +/* Blockette 320, Pseudo-random Calibration (without header) */ +struct blkt_320_s +{ + BTime time; + uint8_t reserved1; + uint8_t flags; + uint32_t duration; + float ptp_amplitude; + char input_channel[3]; + uint8_t reserved2; + uint32_t reference_amplitude; + char coupling[12]; + char rolloff[12]; + char noise_type[8]; +} LMP_PACKED; + +/* Blockette 390, Generic Calibration (without header) */ +struct blkt_390_s +{ + BTime time; + uint8_t reserved1; + uint8_t flags; + uint32_t duration; + float amplitude; + char input_channel[3]; + uint8_t reserved2; +} LMP_PACKED; + +/* Blockette 395, Calibration Abort (without header) */ +struct blkt_395_s +{ + BTime time; + uint8_t reserved[2]; +} LMP_PACKED; + +/* Blockette 400, Beam (without header) */ +struct blkt_400_s +{ + float azimuth; + float slowness; + uint16_t configuration; + uint8_t reserved[2]; +} LMP_PACKED; + +/* Blockette 405, Beam Delay (without header) */ +struct blkt_405_s +{ + uint16_t delay_values[1]; +}; + +/* Blockette 500, Timing (without header) */ +struct blkt_500_s +{ + float vco_correction; + BTime time; + int8_t usec; + uint8_t reception_qual; + uint32_t exception_count; + char exception_type[16]; + char clock_model[32]; + char clock_status[128]; +} LMP_PACKED; + +/* Blockette 1000, Data Only SEED (without header) */ +struct blkt_1000_s +{ + uint8_t encoding; + uint8_t byteorder; + uint8_t reclen; + uint8_t reserved; +} LMP_PACKED; + +/* Blockette 1001, Data Extension (without header) */ +struct blkt_1001_s +{ + uint8_t timing_qual; + int8_t usec; + uint8_t reserved; + uint8_t framecnt; +} LMP_PACKED; + +/* Blockette 2000, Opaque Data (without header) */ +struct blkt_2000_s +{ + uint16_t length; + uint16_t data_offset; + uint32_t recnum; + uint8_t byteorder; + uint8_t flags; + uint8_t numheaders; + char payload[1]; +} LMP_PACKED; + +/* Blockette chain link, generic linkable blockette index */ +typedef struct blkt_link_s +{ + uint16_t blktoffset; /* Offset to this blockette */ + uint16_t blkt_type; /* Blockette type */ + uint16_t next_blkt; /* Offset to next blockette */ + void *blktdata; /* Blockette data */ + uint16_t blktdatalen; /* Length of blockette data in bytes */ + struct blkt_link_s *next; +} +BlktLink; + +typedef struct StreamState_s +{ + int64_t packedrecords; /* Count of packed records */ + int64_t packedsamples; /* Count of packed samples */ + int32_t lastintsample; /* Value of last integer sample packed */ + flag comphistory; /* Control use of lastintsample for compression history */ +} +StreamState; + +typedef struct MSRecord_s { + char *record; /* Mini-SEED record */ + int32_t reclen; /* Length of Mini-SEED record in bytes */ + + /* Pointers to SEED data record structures */ + struct fsdh_s *fsdh; /* Fixed Section of Data Header */ + BlktLink *blkts; /* Root of blockette chain */ + struct blkt_100_s *Blkt100; /* Blockette 100, if present */ + struct blkt_1000_s *Blkt1000; /* Blockette 1000, if present */ + struct blkt_1001_s *Blkt1001; /* Blockette 1001, if present */ + + /* Common header fields in accessible form */ + int32_t sequence_number; /* SEED record sequence number */ + char network[11]; /* Network designation, NULL terminated */ + char station[11]; /* Station designation, NULL terminated */ + char location[11]; /* Location designation, NULL terminated */ + char channel[11]; /* Channel designation, NULL terminated */ + char dataquality; /* Data quality indicator */ + hptime_t starttime; /* Record start time, corrected (first sample) */ + double samprate; /* Nominal 
sample rate (Hz) */ + int64_t samplecnt; /* Number of samples in record */ + int8_t encoding; /* Data encoding format */ + int8_t byteorder; /* Original/Final byte order of record */ + + /* Data sample fields */ + void *datasamples; /* Data samples, 'numsamples' of type 'sampletype'*/ + int64_t numsamples; /* Number of data samples in datasamples */ + char sampletype; /* Sample type code: a, i, f, d */ + + /* Stream oriented state information */ + StreamState *ststate; /* Stream processing state information */ +} +MSRecord; + +/* Container for a continuous trace, linkable */ +typedef struct MSTrace_s { + char network[11]; /* Network designation, NULL terminated */ + char station[11]; /* Station designation, NULL terminated */ + char location[11]; /* Location designation, NULL terminated */ + char channel[11]; /* Channel designation, NULL terminated */ + char dataquality; /* Data quality indicator */ + char type; /* MSTrace type code */ + hptime_t starttime; /* Time of first sample */ + hptime_t endtime; /* Time of last sample */ + double samprate; /* Nominal sample rate (Hz) */ + int64_t samplecnt; /* Number of samples in trace coverage */ + void *datasamples; /* Data samples, 'numsamples' of type 'sampletype' */ + int64_t numsamples; /* Number of data samples in datasamples */ + char sampletype; /* Sample type code: a, i, f, d */ + void *prvtptr; /* Private pointer for general use, unused by libmseed */ + StreamState *ststate; /* Stream processing state information */ + struct MSTrace_s *next; /* Pointer to next trace */ +} +MSTrace; + +/* Container for a group (chain) of traces */ +typedef struct MSTraceGroup_s { + int32_t numtraces; /* Number of MSTraces in the trace chain */ + struct MSTrace_s *traces; /* Root of the trace chain */ +} +MSTraceGroup; + +/* Container for a continuous trace segment, linkable */ +typedef struct MSTraceSeg_s { + hptime_t starttime; /* Time of first sample */ + hptime_t endtime; /* Time of last sample */ + double samprate; /* Nominal sample rate (Hz) */ + int64_t samplecnt; /* Number of samples in trace coverage */ + void *datasamples; /* Data samples, 'numsamples' of type 'sampletype'*/ + int64_t numsamples; /* Number of data samples in datasamples */ + char sampletype; /* Sample type code: a, i, f, d */ + void *prvtptr; /* Private pointer for general use, unused by libmseed */ + struct MSTraceSeg_s *prev; /* Pointer to previous segment */ + struct MSTraceSeg_s *next; /* Pointer to next segment */ +} +MSTraceSeg; + +/* Container for a trace ID, linkable */ +typedef struct MSTraceID_s { + char network[11]; /* Network designation, NULL terminated */ + char station[11]; /* Station designation, NULL terminated */ + char location[11]; /* Location designation, NULL terminated */ + char channel[11]; /* Channel designation, NULL terminated */ + char dataquality; /* Data quality indicator */ + char srcname[45]; /* Source name (Net_Sta_Loc_Chan_Qual), NULL terminated */ + char type; /* Trace type code */ + hptime_t earliest; /* Time of earliest sample */ + hptime_t latest; /* Time of latest sample */ + void *prvtptr; /* Private pointer for general use, unused by libmseed */ + int32_t numsegments; /* Number of segments for this ID */ + struct MSTraceSeg_s *first; /* Pointer to first of list of segments */ + struct MSTraceSeg_s *last; /* Pointer to last of list of segments */ + struct MSTraceID_s *next; /* Pointer to next trace */ +} +MSTraceID; + +/* Container for a continuous trace segment, linkable */ +typedef struct MSTraceList_s { + int32_t numtraces; /* Number of 
traces in list */ + struct MSTraceID_s *traces; /* Pointer to list of traces */ + struct MSTraceID_s *last; /* Pointer to last used trace in list */ +} +MSTraceList; + +/* Data selection structure time window definition containers */ +typedef struct SelectTime_s { + hptime_t starttime; /* Earliest data for matching channels */ + hptime_t endtime; /* Latest data for matching channels */ + struct SelectTime_s *next; +} SelectTime; + +/* Data selection structure definition containers */ +typedef struct Selections_s { + char srcname[100]; /* Matching (globbing) source name: Net_Sta_Loc_Chan_Qual */ + struct SelectTime_s *timewindows; + struct Selections_s *next; +} Selections; + + +/* Global variables (defined in pack.c) and macros to set/force + * pack byte orders */ +extern flag packheaderbyteorder; +extern flag packdatabyteorder; +#define MS_PACKHEADERBYTEORDER(X) (packheaderbyteorder = X); +#define MS_PACKDATABYTEORDER(X) (packdatabyteorder = X); + +/* Global variables (defined in unpack.c) and macros to set/force + * unpack byte orders */ +extern flag unpackheaderbyteorder; +extern flag unpackdatabyteorder; +#define MS_UNPACKHEADERBYTEORDER(X) (unpackheaderbyteorder = X); +#define MS_UNPACKDATABYTEORDER(X) (unpackdatabyteorder = X); + +/* Global variables (defined in unpack.c) and macros to set/force + * encoding and fallback encoding */ +extern int unpackencodingformat; +extern int unpackencodingfallback; +#define MS_UNPACKENCODINGFORMAT(X) (unpackencodingformat = X); +#define MS_UNPACKENCODINGFALLBACK(X) (unpackencodingfallback = X); + +/* Mini-SEED record related functions */ +extern int msr_parse (char *record, int recbuflen, MSRecord **ppmsr, int reclen, + flag dataflag, flag verbose); + +extern int msr_parse_selection ( char *recbuf, int recbuflen, int64_t *offset, + MSRecord **ppmsr, int reclen, + Selections *selections, flag dataflag, flag verbose ); + +extern int msr_unpack (char *record, int reclen, MSRecord **ppmsr, + flag dataflag, flag verbose); + +extern int msr_pack (MSRecord *msr, void (*record_handler) (char *, int, void *), + void *handlerdata, int64_t *packedsamples, flag flush, flag verbose ); + +extern int msr_pack_header (MSRecord *msr, flag normalize, flag verbose); + +extern int msr_unpack_data (MSRecord *msr, int swapflag, flag verbose); + +extern MSRecord* msr_init (MSRecord *msr); +extern void msr_free (MSRecord **ppmsr); +extern void msr_free_blktchain (MSRecord *msr); +extern BlktLink* msr_addblockette (MSRecord *msr, char *blktdata, int length, + int blkttype, int chainpos); +extern int msr_normalize_header (MSRecord *msr, flag verbose); +extern MSRecord* msr_duplicate (MSRecord *msr, flag datadup); +extern double msr_samprate (MSRecord *msr); +extern double msr_nomsamprate (MSRecord *msr); +extern hptime_t msr_starttime (MSRecord *msr); +extern hptime_t msr_starttime_uc (MSRecord *msr); +extern hptime_t msr_endtime (MSRecord *msr); +extern char* msr_srcname (MSRecord *msr, char *srcname, flag quality); +extern void msr_print (MSRecord *msr, flag details); +extern double msr_host_latency (MSRecord *msr); + +extern int ms_detect (const char *record, int recbuflen); +extern int ms_parse_raw (char *record, int maxreclen, flag details, flag swapflag); + + +/* MSTrace related functions */ +extern MSTrace* mst_init (MSTrace *mst); +extern void mst_free (MSTrace **ppmst); +extern MSTraceGroup* mst_initgroup (MSTraceGroup *mstg); +extern void mst_freegroup (MSTraceGroup **ppmstg); +extern MSTrace* mst_findmatch (MSTrace *startmst, char dataquality, + char *network, 
char *station, char *location, char *channel); +extern MSTrace* mst_findadjacent (MSTraceGroup *mstg, flag *whence, char dataquality, + char *network, char *station, char *location, char *channel, + double samprate, double sampratetol, + hptime_t starttime, hptime_t endtime, double timetol); +extern int mst_addmsr (MSTrace *mst, MSRecord *msr, flag whence); +extern int mst_addspan (MSTrace *mst, hptime_t starttime, hptime_t endtime, + void *datasamples, int64_t numsamples, + char sampletype, flag whence); +extern MSTrace* mst_addmsrtogroup (MSTraceGroup *mstg, MSRecord *msr, flag dataquality, + double timetol, double sampratetol); +extern MSTrace* mst_addtracetogroup (MSTraceGroup *mstg, MSTrace *mst); +extern int mst_groupheal (MSTraceGroup *mstg, double timetol, double sampratetol); +extern int mst_groupsort (MSTraceGroup *mstg, flag quality); +extern int mst_convertsamples (MSTrace *mst, char type, flag truncate); +extern char * mst_srcname (MSTrace *mst, char *srcname, flag quality); +extern void mst_printtracelist (MSTraceGroup *mstg, flag timeformat, + flag details, flag gaps); +extern void mst_printsynclist ( MSTraceGroup *mstg, char *dccid, flag subsecond ); +extern void mst_printgaplist (MSTraceGroup *mstg, flag timeformat, + double *mingap, double *maxgap); +extern int mst_pack (MSTrace *mst, void (*record_handler) (char *, int, void *), + void *handlerdata, int reclen, flag encoding, flag byteorder, + int64_t *packedsamples, flag flush, flag verbose, + MSRecord *mstemplate); +extern int mst_packgroup (MSTraceGroup *mstg, void (*record_handler) (char *, int, void *), + void *handlerdata, int reclen, flag encoding, flag byteorder, + int64_t *packedsamples, flag flush, flag verbose, + MSRecord *mstemplate); + +/* MSTraceList related functions */ +extern MSTraceList * mstl_init ( MSTraceList *mstl ); +extern void mstl_free ( MSTraceList **ppmstl, flag freeprvtptr ); +extern MSTraceSeg * mstl_addmsr ( MSTraceList *mstl, MSRecord *msr, flag dataquality, + flag autoheal, double timetol, double sampratetol ); +extern int mstl_convertsamples ( MSTraceSeg *seg, char type, flag truncate ); +extern void mstl_printtracelist ( MSTraceList *mstl, flag timeformat, + flag details, flag gaps ); +extern void mstl_printsynclist ( MSTraceList *mstl, char *dccid, flag subsecond ); +extern void mstl_printgaplist (MSTraceList *mstl, flag timeformat, + double *mingap, double *maxgap); + +/* Reading Mini-SEED records from files */ +typedef struct MSFileParam_s +{ + FILE *fp; + char filename[512]; + char *rawrec; + int readlen; + int readoffset; + int packtype; + off_t packhdroffset; + off_t filepos; + off_t filesize; + int recordcount; +} MSFileParam; + +extern int ms_readmsr (MSRecord **ppmsr, const char *msfile, int reclen, off_t *fpos, int *last, + flag skipnotdata, flag dataflag, flag verbose); +extern int ms_readmsr_r (MSFileParam **ppmsfp, MSRecord **ppmsr, const char *msfile, int reclen, + off_t *fpos, int *last, flag skipnotdata, flag dataflag, flag verbose); +extern int ms_readmsr_main (MSFileParam **ppmsfp, MSRecord **ppmsr, const char *msfile, int reclen, + off_t *fpos, int *last, flag skipnotdata, flag dataflag, Selections *selections, flag verbose); +extern int ms_readtraces (MSTraceGroup **ppmstg, const char *msfile, int reclen, double timetol, double sampratetol, + flag dataquality, flag skipnotdata, flag dataflag, flag verbose); +extern int ms_readtraces_timewin (MSTraceGroup **ppmstg, const char *msfile, int reclen, double timetol, double sampratetol, + hptime_t starttime, hptime_t 
endtime, flag dataquality, flag skipnotdata, flag dataflag, flag verbose); +extern int ms_readtraces_selection (MSTraceGroup **ppmstg, const char *msfile, int reclen, double timetol, double sampratetol, + Selections *selections, flag dataquality, flag skipnotdata, flag dataflag, flag verbose); +extern int ms_readtracelist (MSTraceList **ppmstl, const char *msfile, int reclen, double timetol, double sampratetol, + flag dataquality, flag skipnotdata, flag dataflag, flag verbose); +extern int ms_readtracelist_timewin (MSTraceList **ppmstl, const char *msfile, int reclen, double timetol, double sampratetol, + hptime_t starttime, hptime_t endtime, flag dataquality, flag skipnotdata, flag dataflag, flag verbose); +extern int ms_readtracelist_selection (MSTraceList **ppmstl, const char *msfile, int reclen, double timetol, double sampratetol, + Selections *selections, flag dataquality, flag skipnotdata, flag dataflag, flag verbose); + +extern int msr_writemseed ( MSRecord *msr, const char *msfile, flag overwrite, int reclen, + flag encoding, flag byteorder, flag verbose ); +extern int mst_writemseed ( MSTrace *mst, const char *msfile, flag overwrite, int reclen, + flag encoding, flag byteorder, flag verbose ); +extern int mst_writemseedgroup ( MSTraceGroup *mstg, const char *msfile, flag overwrite, + int reclen, flag encoding, flag byteorder, flag verbose ); + +/* General use functions */ +extern char* ms_recsrcname (char *record, char *srcname, flag quality); +extern int ms_splitsrcname (char *srcname, char *net, char *sta, char *loc, char *chan, char *qual); +extern int ms_strncpclean (char *dest, const char *source, int length); +extern int ms_strncpcleantail (char *dest, const char *source, int length); +extern int ms_strncpopen (char *dest, const char *source, int length); +extern int ms_doy2md (int year, int jday, int *month, int *mday); +extern int ms_md2doy (int year, int month, int mday, int *jday); +extern hptime_t ms_btime2hptime (BTime *btime); +extern char* ms_btime2isotimestr (BTime *btime, char *isotimestr); +extern char* ms_btime2mdtimestr (BTime *btime, char *mdtimestr); +extern char* ms_btime2seedtimestr (BTime *btime, char *seedtimestr); +extern int ms_hptime2tomsusecoffset (hptime_t hptime, hptime_t *toms, int8_t *usecoffset); +extern int ms_hptime2btime (hptime_t hptime, BTime *btime); +extern char* ms_hptime2isotimestr (hptime_t hptime, char *isotimestr, flag subsecond); +extern char* ms_hptime2mdtimestr (hptime_t hptime, char *mdtimestr, flag subsecond); +extern char* ms_hptime2seedtimestr (hptime_t hptime, char *seedtimestr, flag subsecond); +extern hptime_t ms_time2hptime (int year, int day, int hour, int min, int sec, int usec); +extern hptime_t ms_seedtimestr2hptime (char *seedtimestr); +extern hptime_t ms_timestr2hptime (char *timestr); +extern double ms_nomsamprate (int factor, int multiplier); +extern int ms_genfactmult (double samprate, int16_t *factor, int16_t *multiplier); +extern int ms_ratapprox (double real, int *num, int *den, int maxval, double precision); +extern int ms_bigendianhost (void); +extern double ms_dabs (double val); +extern double ms_rsqrt64 (double val); + + +/* Lookup functions */ +extern uint8_t ms_samplesize (const char sampletype); +extern char* ms_encodingstr (const char encoding); +extern char* ms_blktdesc (uint16_t blkttype); +extern uint16_t ms_blktlen (uint16_t blkttype, const char *blktdata, flag swapflag); +extern char * ms_errorstr (int errorcode); + +/* Logging facility */ +#define MAX_LOG_MSG_LENGTH 200 /* Maximum length of log 
messages */ + +/* Logging parameters */ +typedef struct MSLogParam_s +{ + void (*log_print)(char*); + const char *logprefix; + void (*diag_print)(char*); + const char *errprefix; +} MSLogParam; + +extern int ms_log (int level, ...); +extern int ms_log_l (MSLogParam *logp, int level, ...); +extern void ms_loginit (void (*log_print)(char*), const char *logprefix, + void (*diag_print)(char*), const char *errprefix); +extern MSLogParam *ms_loginit_l (MSLogParam *logp, + void (*log_print)(char*), const char *logprefix, + void (*diag_print)(char*), const char *errprefix); + +/* Selection functions */ +extern Selections *ms_matchselect (Selections *selections, char *srcname, + hptime_t starttime, hptime_t endtime, SelectTime **ppselecttime); +extern Selections *msr_matchselect (Selections *selections, MSRecord *msr, SelectTime **ppselecttime); +extern int ms_addselect (Selections **ppselections, char *srcname, + hptime_t starttime, hptime_t endtime); +extern int ms_addselect_comp (Selections **ppselections, char *net, char* sta, char *loc, + char *chan, char *qual, hptime_t starttime, hptime_t endtime); +extern int ms_readselectionsfile (Selections **ppselections, char *filename); +extern void ms_freeselections (Selections *selections); +extern void ms_printselections (Selections *selections); + +/* Leap second declarations, implementation in gentutils.c */ +typedef struct LeapSecond_s +{ + hptime_t leapsecond; + int32_t TAIdelta; + struct LeapSecond_s *next; +} LeapSecond; + +extern LeapSecond *leapsecondlist; +extern int ms_readleapseconds (char *envvarname); +extern int ms_readleapsecondfile (char *filename); + +/* Generic byte swapping routines */ +extern void ms_gswap2 ( void *data2 ); +extern void ms_gswap3 ( void *data3 ); +extern void ms_gswap4 ( void *data4 ); +extern void ms_gswap8 ( void *data8 ); + +/* Generic byte swapping routines for memory aligned quantities */ +extern void ms_gswap2a ( void *data2 ); +extern void ms_gswap4a ( void *data4 ); +extern void ms_gswap8a ( void *data8 ); + +/* Byte swap macro for the BTime struct */ +#define MS_SWAPBTIME(x) \ + ms_gswap2 (x.year); \ + ms_gswap2 (x.day); \ + ms_gswap2 (x.fract); + +/* Platform portable functions */ +extern off_t lmp_ftello (FILE *stream); +extern int lmp_fseeko (FILE *stream, off_t offset, int whence); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBMSEED_H */ diff --git a/include/lmplatform.h b/include/lmplatform.h new file mode 100644 index 0000000..60d3377 --- /dev/null +++ b/include/lmplatform.h @@ -0,0 +1,127 @@ +/*************************************************************************** + * lmplatform.h: + * + * Platform specific headers. This file provides a basic level of platform + * portability. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public License + * as published by the Free Software Foundation; either version 2 of + * the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License (GNU-LGPL) for more details. 
The + * GNU-LGPL and further information can be found here: + * http://www.gnu.org/ + * + * Written by Chad Trabant, IRIS Data Management Center + * + * modified: 2015.134 + ***************************************************************************/ + +#ifndef LMPLATFORM_H +#define LMPLATFORM_H 1 + +#ifdef __cplusplus +extern "C" { +#endif + + /* On some platforms (e.g. ARM) structures are aligned on word boundaries + by adding padding between the elements. This library uses structs that + map to SEED header/blockette structures that are required to have a + layout exactly as specified, i.e. no padding. + + If "ATTRIBUTE_PACKED" is defined at compile time (e.g. -DATTRIBUTE_PACKED) + the preprocessor will use the define below to add the "packed" attribute + to effected structs. This attribute is supported by GCC and increasingly + more compilers. + */ +#if defined(ATTRIBUTE_PACKED) + #define LMP_PACKED __attribute__((packed)) +#else + #define LMP_PACKED +#endif + +/* C99 standard headers */ +#include +#include +#include +#include +#include +#include + +/* Set architecture specific defines and features */ +#if defined(__linux__) || defined(__linux) || defined(__CYGWIN__) + #define LMP_LINUX 1 + #define LMP_GLIBC2 1 /* Deprecated */ + + #include + #include + +#elif defined(__sun__) || defined(__sun) + #define LMP_SOLARIS 1 + + #include + #include + +#elif defined(__APPLE__) || defined(__FreeBSD__) || defined(__OpenBSD__) || defined(__NetBSD__) + #define LMP_BSD 1 + + #include + #include + +#elif defined(WIN32) || defined(_WIN32) || defined(WIN64) || defined(_WIN64) + #define LMP_WIN 1 + #define LMP_WIN32 1 /* Deprecated */ + + #include + #include + + /* For pre-MSVC 2010 define standard int types, otherwise use inttypes.h */ + #if defined(_MSC_VER) && _MSC_VER < 1600 + typedef signed char int8_t; + typedef unsigned char uint8_t; + typedef signed short int int16_t; + typedef unsigned short int uint16_t; + typedef signed int int32_t; + typedef unsigned int uint32_t; + typedef signed __int64 int64_t; + typedef unsigned __int64 uint64_t; + #else + #include + #endif + + #if defined(_MSC_VER) + #if !defined(PRId64) + #define PRId64 "I64d" + #endif + #if !defined(SCNd64) + #define SCNd64 "I64d" + #endif + + #define snprintf _snprintf + #define vsnprintf _vsnprintf + #define strcasecmp _stricmp + #define strncasecmp _strnicmp + #define strtoull _strtoui64 + #define strdup _strdup + #define fileno _fileno + #endif + + #if defined(__MINGW32__) || defined(__MINGW64__) + #define fstat _fstat + #define stat _stat + #endif + +#endif + +extern off_t lmp_ftello (FILE *stream); +extern int lmp_fseeko (FILE *stream, off_t offset, int whence); + +#ifdef __cplusplus +} +#endif + +#endif /* LMPLATFORM_H */ diff --git a/include/rapidjson/allocators.h b/include/rapidjson/allocators.h new file mode 100644 index 0000000..98affe0 --- /dev/null +++ b/include/rapidjson/allocators.h @@ -0,0 +1,271 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_ALLOCATORS_H_ +#define RAPIDJSON_ALLOCATORS_H_ + +#include "rapidjson.h" + +RAPIDJSON_NAMESPACE_BEGIN + +/////////////////////////////////////////////////////////////////////////////// +// Allocator + +/*! \class rapidjson::Allocator + \brief Concept for allocating, resizing and freeing memory block. + + Note that Malloc() and Realloc() are non-static but Free() is static. + + So if an allocator need to support Free(), it needs to put its pointer in + the header of memory block. + +\code +concept Allocator { + static const bool kNeedFree; //!< Whether this allocator needs to call Free(). + + // Allocate a memory block. + // \param size of the memory block in bytes. + // \returns pointer to the memory block. + void* Malloc(size_t size); + + // Resize a memory block. + // \param originalPtr The pointer to current memory block. Null pointer is permitted. + // \param originalSize The current size in bytes. (Design issue: since some allocator may not book-keep this, explicitly pass to it can save memory.) + // \param newSize the new size in bytes. + void* Realloc(void* originalPtr, size_t originalSize, size_t newSize); + + // Free a memory block. + // \param pointer to the memory block. Null pointer is permitted. + static void Free(void *ptr); +}; +\endcode +*/ + +/////////////////////////////////////////////////////////////////////////////// +// CrtAllocator + +//! C-runtime library allocator. +/*! This class is just wrapper for standard C library memory routines. + \note implements Allocator concept +*/ +class CrtAllocator { +public: + static const bool kNeedFree = true; + void* Malloc(size_t size) { + if (size) // behavior of malloc(0) is implementation defined. + return std::malloc(size); + else + return NULL; // standardize to returning NULL. + } + void* Realloc(void* originalPtr, size_t originalSize, size_t newSize) { + (void)originalSize; + if (newSize == 0) { + std::free(originalPtr); + return NULL; + } + return std::realloc(originalPtr, newSize); + } + static void Free(void *ptr) { std::free(ptr); } +}; + +/////////////////////////////////////////////////////////////////////////////// +// MemoryPoolAllocator + +//! Default memory allocator used by the parser and DOM. +/*! This allocator allocate memory blocks from pre-allocated memory chunks. + + It does not free memory blocks. And Realloc() only allocate new memory. + + The memory chunks are allocated by BaseAllocator, which is CrtAllocator by default. + + User may also supply a buffer as the first chunk. + + If the user-buffer is full then additional chunks are allocated by BaseAllocator. + + The user-buffer is not deallocated by this allocator. + + \tparam BaseAllocator the allocator type for allocating memory chunks. Default is CrtAllocator. + \note implements Allocator concept +*/ +template +class MemoryPoolAllocator { +public: + static const bool kNeedFree = false; //!< Tell users that no need to call Free() with this allocator. (concept Allocator) + + //! Constructor with chunkSize. + /*! \param chunkSize The size of memory chunk. The default is kDefaultChunkSize. + \param baseAllocator The allocator for allocating memory chunks. + */ + MemoryPoolAllocator(size_t chunkSize = kDefaultChunkCapacity, BaseAllocator* baseAllocator = 0) : + chunkHead_(0), chunk_capacity_(chunkSize), userBuffer_(0), baseAllocator_(baseAllocator), ownBaseAllocator_(0) + { + } + + //! Constructor with user-supplied buffer. + /*! 
The user buffer will be used firstly. When it is full, memory pool allocates new chunk with chunk size. + + The user buffer will not be deallocated when this allocator is destructed. + + \param buffer User supplied buffer. + \param size Size of the buffer in bytes. It must at least larger than sizeof(ChunkHeader). + \param chunkSize The size of memory chunk. The default is kDefaultChunkSize. + \param baseAllocator The allocator for allocating memory chunks. + */ + MemoryPoolAllocator(void *buffer, size_t size, size_t chunkSize = kDefaultChunkCapacity, BaseAllocator* baseAllocator = 0) : + chunkHead_(0), chunk_capacity_(chunkSize), userBuffer_(buffer), baseAllocator_(baseAllocator), ownBaseAllocator_(0) + { + RAPIDJSON_ASSERT(buffer != 0); + RAPIDJSON_ASSERT(size > sizeof(ChunkHeader)); + chunkHead_ = reinterpret_cast(buffer); + chunkHead_->capacity = size - sizeof(ChunkHeader); + chunkHead_->size = 0; + chunkHead_->next = 0; + } + + //! Destructor. + /*! This deallocates all memory chunks, excluding the user-supplied buffer. + */ + ~MemoryPoolAllocator() { + Clear(); + RAPIDJSON_DELETE(ownBaseAllocator_); + } + + //! Deallocates all memory chunks, excluding the user-supplied buffer. + void Clear() { + while (chunkHead_ && chunkHead_ != userBuffer_) { + ChunkHeader* next = chunkHead_->next; + baseAllocator_->Free(chunkHead_); + chunkHead_ = next; + } + if (chunkHead_ && chunkHead_ == userBuffer_) + chunkHead_->size = 0; // Clear user buffer + } + + //! Computes the total capacity of allocated memory chunks. + /*! \return total capacity in bytes. + */ + size_t Capacity() const { + size_t capacity = 0; + for (ChunkHeader* c = chunkHead_; c != 0; c = c->next) + capacity += c->capacity; + return capacity; + } + + //! Computes the memory blocks allocated. + /*! \return total used bytes. + */ + size_t Size() const { + size_t size = 0; + for (ChunkHeader* c = chunkHead_; c != 0; c = c->next) + size += c->size; + return size; + } + + //! Allocates a memory block. (concept Allocator) + void* Malloc(size_t size) { + if (!size) + return NULL; + + size = RAPIDJSON_ALIGN(size); + if (chunkHead_ == 0 || chunkHead_->size + size > chunkHead_->capacity) + if (!AddChunk(chunk_capacity_ > size ? chunk_capacity_ : size)) + return NULL; + + void *buffer = reinterpret_cast(chunkHead_) + RAPIDJSON_ALIGN(sizeof(ChunkHeader)) + chunkHead_->size; + chunkHead_->size += size; + return buffer; + } + + //! Resizes a memory block (concept Allocator) + void* Realloc(void* originalPtr, size_t originalSize, size_t newSize) { + if (originalPtr == 0) + return Malloc(newSize); + + if (newSize == 0) + return NULL; + + originalSize = RAPIDJSON_ALIGN(originalSize); + newSize = RAPIDJSON_ALIGN(newSize); + + // Do not shrink if new size is smaller than original + if (originalSize >= newSize) + return originalPtr; + + // Simply expand it if it is the last allocation and there is sufficient space + if (originalPtr == reinterpret_cast(chunkHead_) + RAPIDJSON_ALIGN(sizeof(ChunkHeader)) + chunkHead_->size - originalSize) { + size_t increment = static_cast(newSize - originalSize); + if (chunkHead_->size + increment <= chunkHead_->capacity) { + chunkHead_->size += increment; + return originalPtr; + } + } + + // Realloc process: allocate and copy memory, do not free original buffer. + if (void* newBuffer = Malloc(newSize)) { + if (originalSize) + std::memcpy(newBuffer, originalPtr, originalSize); + return newBuffer; + } + else + return NULL; + } + + //! 
Frees a memory block (concept Allocator) + static void Free(void *ptr) { (void)ptr; } // Do nothing + +private: + //! Copy constructor is not permitted. + MemoryPoolAllocator(const MemoryPoolAllocator& rhs) /* = delete */; + //! Copy assignment operator is not permitted. + MemoryPoolAllocator& operator=(const MemoryPoolAllocator& rhs) /* = delete */; + + //! Creates a new chunk. + /*! \param capacity Capacity of the chunk in bytes. + \return true if success. + */ + bool AddChunk(size_t capacity) { + if (!baseAllocator_) + ownBaseAllocator_ = baseAllocator_ = RAPIDJSON_NEW(BaseAllocator()); + if (ChunkHeader* chunk = reinterpret_cast(baseAllocator_->Malloc(RAPIDJSON_ALIGN(sizeof(ChunkHeader)) + capacity))) { + chunk->capacity = capacity; + chunk->size = 0; + chunk->next = chunkHead_; + chunkHead_ = chunk; + return true; + } + else + return false; + } + + static const int kDefaultChunkCapacity = 64 * 1024; //!< Default chunk capacity. + + //! Chunk header for perpending to each chunk. + /*! Chunks are stored as a singly linked list. + */ + struct ChunkHeader { + size_t capacity; //!< Capacity of the chunk in bytes (excluding the header itself). + size_t size; //!< Current size of allocated memory in bytes. + ChunkHeader *next; //!< Next chunk in the linked list. + }; + + ChunkHeader *chunkHead_; //!< Head of the chunk linked-list. Only the head chunk serves allocation. + size_t chunk_capacity_; //!< The minimum capacity of chunk when they are allocated. + void *userBuffer_; //!< User supplied buffer. + BaseAllocator* baseAllocator_; //!< base allocator for allocating memory chunks. + BaseAllocator* ownBaseAllocator_; //!< base allocator created by this object. +}; + +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_ENCODINGS_H_ diff --git a/include/rapidjson/document.h b/include/rapidjson/document.h new file mode 100644 index 0000000..b5934fc --- /dev/null +++ b/include/rapidjson/document.h @@ -0,0 +1,2575 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_DOCUMENT_H_ +#define RAPIDJSON_DOCUMENT_H_ + +/*! 
\file document.h */ + +#include "reader.h" +#include "internal/meta.h" +#include "internal/strfunc.h" +#include "memorystream.h" +#include "encodedstream.h" +#include // placement new +#include + +RAPIDJSON_DIAG_PUSH +#ifdef _MSC_VER +RAPIDJSON_DIAG_OFF(4127) // conditional expression is constant +RAPIDJSON_DIAG_OFF(4244) // conversion from kXxxFlags to 'uint16_t', possible loss of data +#endif + +#ifdef __clang__ +RAPIDJSON_DIAG_OFF(padded) +RAPIDJSON_DIAG_OFF(switch-enum) +RAPIDJSON_DIAG_OFF(c++98-compat) +#endif + +#ifdef __GNUC__ +RAPIDJSON_DIAG_OFF(effc++) +#if __GNUC__ >= 6 +RAPIDJSON_DIAG_OFF(terminate) // ignore throwing RAPIDJSON_ASSERT in RAPIDJSON_NOEXCEPT functions +#endif +#endif // __GNUC__ + +#ifndef RAPIDJSON_NOMEMBERITERATORCLASS +#include // std::iterator, std::random_access_iterator_tag +#endif + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS +#include // std::move +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +// Forward declaration. +template +class GenericValue; + +template +class GenericDocument; + +//! Name-value pair in a JSON object value. +/*! + This class was internal to GenericValue. It used to be a inner struct. + But a compiler (IBM XL C/C++ for AIX) have reported to have problem with that so it moved as a namespace scope struct. + https://code.google.com/p/rapidjson/issues/detail?id=64 +*/ +template +struct GenericMember { + GenericValue name; //!< name of member (must be a string) + GenericValue value; //!< value of member. +}; + +/////////////////////////////////////////////////////////////////////////////// +// GenericMemberIterator + +#ifndef RAPIDJSON_NOMEMBERITERATORCLASS + +//! (Constant) member iterator for a JSON object value +/*! + \tparam Const Is this a constant iterator? + \tparam Encoding Encoding of the value. (Even non-string values need to have the same encoding in a document) + \tparam Allocator Allocator type for allocating memory of object, array and string. + + This class implements a Random Access Iterator for GenericMember elements + of a GenericValue, see ISO/IEC 14882:2003(E) C++ standard, 24.1 [lib.iterator.requirements]. + + \note This iterator implementation is mainly intended to avoid implicit + conversions from iterator values to \c NULL, + e.g. from GenericValue::FindMember. + + \note Define \c RAPIDJSON_NOMEMBERITERATORCLASS to fall back to a + pointer-based implementation, if your platform doesn't provide + the C++ header. + + \see GenericMember, GenericValue::MemberIterator, GenericValue::ConstMemberIterator + */ +template +class GenericMemberIterator + : public std::iterator >::Type> { + + friend class GenericValue; + template friend class GenericMemberIterator; + + typedef GenericMember PlainType; + typedef typename internal::MaybeAddConst::Type ValueType; + typedef std::iterator BaseType; + +public: + //! Iterator type itself + typedef GenericMemberIterator Iterator; + //! Constant iterator type + typedef GenericMemberIterator ConstIterator; + //! Non-constant iterator type + typedef GenericMemberIterator NonConstIterator; + + //! Pointer to (const) GenericMember + typedef typename BaseType::pointer Pointer; + //! Reference to (const) GenericMember + typedef typename BaseType::reference Reference; + //! Signed integer type (e.g. \c ptrdiff_t) + typedef typename BaseType::difference_type DifferenceType; + + //! Default constructor (singular value) + /*! Creates an iterator pointing to no element. + \note All operations, except for comparisons, are undefined on such values. + */ + GenericMemberIterator() : ptr_() {} + + //! 
Iterator conversions to more const + /*! + \param it (Non-const) iterator to copy from + + Allows the creation of an iterator from another GenericMemberIterator + that is "less const". Especially, creating a non-constant iterator + from a constant iterator are disabled: + \li const -> non-const (not ok) + \li const -> const (ok) + \li non-const -> const (ok) + \li non-const -> non-const (ok) + + \note If the \c Const template parameter is already \c false, this + constructor effectively defines a regular copy-constructor. + Otherwise, the copy constructor is implicitly defined. + */ + GenericMemberIterator(const NonConstIterator & it) : ptr_(it.ptr_) {} + Iterator& operator=(const NonConstIterator & it) { ptr_ = it.ptr_; return *this; } + + //! @name stepping + //@{ + Iterator& operator++(){ ++ptr_; return *this; } + Iterator& operator--(){ --ptr_; return *this; } + Iterator operator++(int){ Iterator old(*this); ++ptr_; return old; } + Iterator operator--(int){ Iterator old(*this); --ptr_; return old; } + //@} + + //! @name increment/decrement + //@{ + Iterator operator+(DifferenceType n) const { return Iterator(ptr_+n); } + Iterator operator-(DifferenceType n) const { return Iterator(ptr_-n); } + + Iterator& operator+=(DifferenceType n) { ptr_+=n; return *this; } + Iterator& operator-=(DifferenceType n) { ptr_-=n; return *this; } + //@} + + //! @name relations + //@{ + bool operator==(ConstIterator that) const { return ptr_ == that.ptr_; } + bool operator!=(ConstIterator that) const { return ptr_ != that.ptr_; } + bool operator<=(ConstIterator that) const { return ptr_ <= that.ptr_; } + bool operator>=(ConstIterator that) const { return ptr_ >= that.ptr_; } + bool operator< (ConstIterator that) const { return ptr_ < that.ptr_; } + bool operator> (ConstIterator that) const { return ptr_ > that.ptr_; } + //@} + + //! @name dereference + //@{ + Reference operator*() const { return *ptr_; } + Pointer operator->() const { return ptr_; } + Reference operator[](DifferenceType n) const { return ptr_[n]; } + //@} + + //! Distance + DifferenceType operator-(ConstIterator that) const { return ptr_-that.ptr_; } + +private: + //! Internal constructor from plain pointer + explicit GenericMemberIterator(Pointer p) : ptr_(p) {} + + Pointer ptr_; //!< raw pointer +}; + +#else // RAPIDJSON_NOMEMBERITERATORCLASS + +// class-based member iterator implementation disabled, use plain pointers + +template +struct GenericMemberIterator; + +//! non-const GenericMemberIterator +template +struct GenericMemberIterator { + //! use plain pointer as iterator type + typedef GenericMember* Iterator; +}; +//! const GenericMemberIterator +template +struct GenericMemberIterator { + //! use plain const pointer as iterator type + typedef const GenericMember* Iterator; +}; + +#endif // RAPIDJSON_NOMEMBERITERATORCLASS + +/////////////////////////////////////////////////////////////////////////////// +// GenericStringRef + +//! Reference to a constant string (not taking a copy) +/*! + \tparam CharType character type of the string + + This helper class is used to automatically infer constant string + references for string literals, especially from \c const \b (!) + character arrays. + + The main use is for creating JSON string values without copying the + source string via an \ref Allocator. This requires that the referenced + string pointers have a sufficient lifetime, which exceeds the lifetime + of the associated GenericValue. 
+ + \b Example + \code + Value v("foo"); // ok, no need to copy & calculate length + const char foo[] = "foo"; + v.SetString(foo); // ok + + const char* bar = foo; + // Value x(bar); // not ok, can't rely on bar's lifetime + Value x(StringRef(bar)); // lifetime explicitly guaranteed by user + Value y(StringRef(bar, 3)); // ok, explicitly pass length + \endcode + + \see StringRef, GenericValue::SetString +*/ +template +struct GenericStringRef { + typedef CharType Ch; //!< character type of the string + + //! Create string reference from \c const character array +#ifndef __clang__ // -Wdocumentation + /*! + This constructor implicitly creates a constant string reference from + a \c const character array. It has better performance than + \ref StringRef(const CharType*) by inferring the string \ref length + from the array length, and also supports strings containing null + characters. + + \tparam N length of the string, automatically inferred + + \param str Constant character array, lifetime assumed to be longer + than the use of the string in e.g. a GenericValue + + \post \ref s == str + + \note Constant complexity. + \note There is a hidden, private overload to disallow references to + non-const character arrays to be created via this constructor. + By this, e.g. function-scope arrays used to be filled via + \c snprintf are excluded from consideration. + In such cases, the referenced string should be \b copied to the + GenericValue instead. + */ +#endif + template + GenericStringRef(const CharType (&str)[N]) RAPIDJSON_NOEXCEPT + : s(str), length(N-1) {} + + //! Explicitly create string reference from \c const character pointer +#ifndef __clang__ // -Wdocumentation + /*! + This constructor can be used to \b explicitly create a reference to + a constant string pointer. + + \see StringRef(const CharType*) + + \param str Constant character pointer, lifetime assumed to be longer + than the use of the string in e.g. a GenericValue + + \post \ref s == str + + \note There is a hidden, private overload to disallow references to + non-const character arrays to be created via this constructor. + By this, e.g. function-scope arrays used to be filled via + \c snprintf are excluded from consideration. + In such cases, the referenced string should be \b copied to the + GenericValue instead. + */ +#endif + explicit GenericStringRef(const CharType* str) + : s(str), length(internal::StrLen(str)){ RAPIDJSON_ASSERT(s != 0); } + + //! Create constant string reference from pointer and length +#ifndef __clang__ // -Wdocumentation + /*! \param str constant string, lifetime assumed to be longer than the use of the string in e.g. a GenericValue + \param len length of the string, excluding the trailing NULL terminator + + \post \ref s == str && \ref length == len + \note Constant complexity. + */ +#endif + GenericStringRef(const CharType* str, SizeType len) + : s(str), length(len) { RAPIDJSON_ASSERT(s != 0); } + + GenericStringRef(const GenericStringRef& rhs) : s(rhs.s), length(rhs.length) {} + + GenericStringRef& operator=(const GenericStringRef& rhs) { s = rhs.s; length = rhs.length; } + + //! implicit conversion to plain CharType pointer + operator const Ch *() const { return s; } + + const Ch* const s; //!< plain CharType pointer + const SizeType length; //!< length of the string (excluding the trailing NULL terminator) + +private: + //! Disallow construction from non-const array + template + GenericStringRef(CharType (&str)[N]) /* = delete */; +}; + +//! Mark a character pointer as constant string +/*! 
Mark a plain character pointer as a "string literal". This function + can be used to avoid copying a character string to be referenced as a + value in a JSON GenericValue object, if the string's lifetime is known + to be valid long enough. + \tparam CharType Character type of the string + \param str Constant string, lifetime assumed to be longer than the use of the string in e.g. a GenericValue + \return GenericStringRef string reference object + \relatesalso GenericStringRef + + \see GenericValue::GenericValue(StringRefType), GenericValue::operator=(StringRefType), GenericValue::SetString(StringRefType), GenericValue::PushBack(StringRefType, Allocator&), GenericValue::AddMember +*/ +template +inline GenericStringRef StringRef(const CharType* str) { + return GenericStringRef(str, internal::StrLen(str)); +} + +//! Mark a character pointer as constant string +/*! Mark a plain character pointer as a "string literal". This function + can be used to avoid copying a character string to be referenced as a + value in a JSON GenericValue object, if the string's lifetime is known + to be valid long enough. + + This version has better performance with supplied length, and also + supports string containing null characters. + + \tparam CharType character type of the string + \param str Constant string, lifetime assumed to be longer than the use of the string in e.g. a GenericValue + \param length The length of source string. + \return GenericStringRef string reference object + \relatesalso GenericStringRef +*/ +template +inline GenericStringRef StringRef(const CharType* str, size_t length) { + return GenericStringRef(str, SizeType(length)); +} + +#if RAPIDJSON_HAS_STDSTRING +//! Mark a string object as constant string +/*! Mark a string object (e.g. \c std::string) as a "string literal". + This function can be used to avoid copying a string to be referenced as a + value in a JSON GenericValue object, if the string's lifetime is known + to be valid long enough. + + \tparam CharType character type of the string + \param str Constant string, lifetime assumed to be longer than the use of the string in e.g. a GenericValue + \return GenericStringRef string reference object + \relatesalso GenericStringRef + \note Requires the definition of the preprocessor symbol \ref RAPIDJSON_HAS_STDSTRING. 
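+
+    Illustrative sketch (variable names are assumptions for the example only):
+    \code
+    std::string owned = "payload";     // must outlive the Value below
+    Value v(StringRef(owned));         // no copy, v references owned.data()
+    \endcode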
+*/ +template +inline GenericStringRef StringRef(const std::basic_string& str) { + return GenericStringRef(str.data(), SizeType(str.size())); +} +#endif + +/////////////////////////////////////////////////////////////////////////////// +// GenericValue type traits +namespace internal { + +template +struct IsGenericValueImpl : FalseType {}; + +// select candidates according to nested encoding and allocator types +template struct IsGenericValueImpl::Type, typename Void::Type> + : IsBaseOf, T>::Type {}; + +// helper to match arbitrary GenericValue instantiations, including derived classes +template struct IsGenericValue : IsGenericValueImpl::Type {}; + +} // namespace internal + +/////////////////////////////////////////////////////////////////////////////// +// TypeHelper + +namespace internal { + +template +struct TypeHelper {}; + +template +struct TypeHelper { + static bool Is(const ValueType& v) { return v.IsBool(); } + static bool Get(const ValueType& v) { return v.GetBool(); } + static ValueType& Set(ValueType& v, bool data) { return v.SetBool(data); } + static ValueType& Set(ValueType& v, bool data, typename ValueType::AllocatorType&) { return v.SetBool(data); } +}; + +template +struct TypeHelper { + static bool Is(const ValueType& v) { return v.IsInt(); } + static int Get(const ValueType& v) { return v.GetInt(); } + static ValueType& Set(ValueType& v, int data) { return v.SetInt(data); } + static ValueType& Set(ValueType& v, int data, typename ValueType::AllocatorType&) { return v.SetInt(data); } +}; + +template +struct TypeHelper { + static bool Is(const ValueType& v) { return v.IsUint(); } + static unsigned Get(const ValueType& v) { return v.GetUint(); } + static ValueType& Set(ValueType& v, unsigned data) { return v.SetUint(data); } + static ValueType& Set(ValueType& v, unsigned data, typename ValueType::AllocatorType&) { return v.SetUint(data); } +}; + +template +struct TypeHelper { + static bool Is(const ValueType& v) { return v.IsInt64(); } + static int64_t Get(const ValueType& v) { return v.GetInt64(); } + static ValueType& Set(ValueType& v, int64_t data) { return v.SetInt64(data); } + static ValueType& Set(ValueType& v, int64_t data, typename ValueType::AllocatorType&) { return v.SetInt64(data); } +}; + +template +struct TypeHelper { + static bool Is(const ValueType& v) { return v.IsUint64(); } + static uint64_t Get(const ValueType& v) { return v.GetUint64(); } + static ValueType& Set(ValueType& v, uint64_t data) { return v.SetUint64(data); } + static ValueType& Set(ValueType& v, uint64_t data, typename ValueType::AllocatorType&) { return v.SetUint64(data); } +}; + +template +struct TypeHelper { + static bool Is(const ValueType& v) { return v.IsDouble(); } + static double Get(const ValueType& v) { return v.GetDouble(); } + static ValueType& Set(ValueType& v, double data) { return v.SetDouble(data); } + static ValueType& Set(ValueType& v, double data, typename ValueType::AllocatorType&) { return v.SetDouble(data); } +}; + +template +struct TypeHelper { + static bool Is(const ValueType& v) { return v.IsFloat(); } + static float Get(const ValueType& v) { return v.GetFloat(); } + static ValueType& Set(ValueType& v, float data) { return v.SetFloat(data); } + static ValueType& Set(ValueType& v, float data, typename ValueType::AllocatorType&) { return v.SetFloat(data); } +}; + +template +struct TypeHelper { + typedef const typename ValueType::Ch* StringType; + static bool Is(const ValueType& v) { return v.IsString(); } + static StringType Get(const ValueType& v) { return 
v.GetString(); } + static ValueType& Set(ValueType& v, const StringType data) { return v.SetString(typename ValueType::StringRefType(data)); } + static ValueType& Set(ValueType& v, const StringType data, typename ValueType::AllocatorType& a) { return v.SetString(data, a); } +}; + +#if RAPIDJSON_HAS_STDSTRING +template +struct TypeHelper > { + typedef std::basic_string StringType; + static bool Is(const ValueType& v) { return v.IsString(); } + static StringType Get(const ValueType& v) { return StringType(v.GetString(), v.GetStringLength()); } + static ValueType& Set(ValueType& v, const StringType& data, typename ValueType::AllocatorType& a) { return v.SetString(data, a); } +}; +#endif + +template +struct TypeHelper { + typedef typename ValueType::Array ArrayType; + static bool Is(const ValueType& v) { return v.IsArray(); } + static ArrayType Get(ValueType& v) { return v.GetArray(); } + static ValueType& Set(ValueType& v, ArrayType data) { return v = data; } + static ValueType& Set(ValueType& v, ArrayType data, typename ValueType::AllocatorType&) { return v = data; } +}; + +template +struct TypeHelper { + typedef typename ValueType::ConstArray ArrayType; + static bool Is(const ValueType& v) { return v.IsArray(); } + static ArrayType Get(const ValueType& v) { return v.GetArray(); } +}; + +template +struct TypeHelper { + typedef typename ValueType::Object ObjectType; + static bool Is(const ValueType& v) { return v.IsObject(); } + static ObjectType Get(ValueType& v) { return v.GetObject(); } + static ValueType& Set(ValueType& v, ObjectType data) { return v = data; } + static ValueType& Set(ValueType& v, ObjectType data, typename ValueType::AllocatorType&) { v = data; } +}; + +template +struct TypeHelper { + typedef typename ValueType::ConstObject ObjectType; + static bool Is(const ValueType& v) { return v.IsObject(); } + static ObjectType Get(const ValueType& v) { return v.GetObject(); } +}; + +} // namespace internal + +// Forward declarations +template class GenericArray; +template class GenericObject; + +/////////////////////////////////////////////////////////////////////////////// +// GenericValue + +//! Represents a JSON value. Use Value for UTF8 encoding and default allocator. +/*! + A JSON value can be one of 7 types. This class is a variant type supporting + these types. + + Use the Value if UTF8 and default allocator + + \tparam Encoding Encoding of the value. (Even non-string values need to have the same encoding in a document) + \tparam Allocator Allocator type for allocating memory of object, array and string. +*/ +template > +class GenericValue { +public: + //! Name-value pair in an object. + typedef GenericMember Member; + typedef Encoding EncodingType; //!< Encoding type from template parameter. + typedef Allocator AllocatorType; //!< Allocator type from template parameter. + typedef typename Encoding::Ch Ch; //!< Character type derived from Encoding. + typedef GenericStringRef StringRefType; //!< Reference to a constant string + typedef typename GenericMemberIterator::Iterator MemberIterator; //!< Member iterator for iterating in object. + typedef typename GenericMemberIterator::Iterator ConstMemberIterator; //!< Constant member iterator for iterating in object. + typedef GenericValue* ValueIterator; //!< Value iterator for iterating in array. + typedef const GenericValue* ConstValueIterator; //!< Constant value iterator for iterating in array. + typedef GenericValue ValueType; //!< Value type of itself. 
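+
+    // Illustrative note: the TypeHelper specialisations above back the generic
+    // Is<T>()/Get<T>()/Set<T>() accessors defined further down in this header, e.g.
+    //
+    //     Value v(3.14);
+    //     bool isNum = v.Is<double>();   // dispatches to TypeHelper<ValueType, double>
+    //     double d   = v.Get<double>();
+    //     v.Set<int>(42);
+    //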
+ typedef GenericArray Array; + typedef GenericArray ConstArray; + typedef GenericObject Object; + typedef GenericObject ConstObject; + + //!@name Constructors and destructor. + //@{ + + //! Default constructor creates a null value. + GenericValue() RAPIDJSON_NOEXCEPT : data_() { data_.f.flags = kNullFlag; } + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + //! Move constructor in C++11 + GenericValue(GenericValue&& rhs) RAPIDJSON_NOEXCEPT : data_(rhs.data_) { + rhs.data_.f.flags = kNullFlag; // give up contents + } +#endif + +private: + //! Copy constructor is not permitted. + GenericValue(const GenericValue& rhs); + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + //! Moving from a GenericDocument is not permitted. + template + GenericValue(GenericDocument&& rhs); + + //! Move assignment from a GenericDocument is not permitted. + template + GenericValue& operator=(GenericDocument&& rhs); +#endif + +public: + + //! Constructor with JSON value type. + /*! This creates a Value of specified type with default content. + \param type Type of the value. + \note Default content for number is zero. + */ + explicit GenericValue(Type type) RAPIDJSON_NOEXCEPT : data_() { + static const uint16_t defaultFlags[7] = { + kNullFlag, kFalseFlag, kTrueFlag, kObjectFlag, kArrayFlag, kShortStringFlag, + kNumberAnyFlag + }; + RAPIDJSON_ASSERT(type <= kNumberType); + data_.f.flags = defaultFlags[type]; + + // Use ShortString to store empty string. + if (type == kStringType) + data_.ss.SetLength(0); + } + + //! Explicit copy constructor (with allocator) + /*! Creates a copy of a Value by using the given Allocator + \tparam SourceAllocator allocator of \c rhs + \param rhs Value to copy from (read-only) + \param allocator Allocator for allocating copied elements and buffers. Commonly use GenericDocument::GetAllocator(). + \see CopyFrom() + */ + template< typename SourceAllocator > + GenericValue(const GenericValue& rhs, Allocator & allocator); + + //! Constructor for boolean value. + /*! \param b Boolean value + \note This constructor is limited to \em real boolean values and rejects + implicitly converted types like arbitrary pointers. Use an explicit cast + to \c bool, if you want to construct a boolean JSON value in such cases. + */ +#ifndef RAPIDJSON_DOXYGEN_RUNNING // hide SFINAE from Doxygen + template + explicit GenericValue(T b, RAPIDJSON_ENABLEIF((internal::IsSame))) RAPIDJSON_NOEXCEPT // See #472 +#else + explicit GenericValue(bool b) RAPIDJSON_NOEXCEPT +#endif + : data_() { + // safe-guard against failing SFINAE + RAPIDJSON_STATIC_ASSERT((internal::IsSame::Value)); + data_.f.flags = b ? kTrueFlag : kFalseFlag; + } + + //! Constructor for int value. + explicit GenericValue(int i) RAPIDJSON_NOEXCEPT : data_() { + data_.n.i64 = i; + data_.f.flags = (i >= 0) ? (kNumberIntFlag | kUintFlag | kUint64Flag) : kNumberIntFlag; + } + + //! Constructor for unsigned value. + explicit GenericValue(unsigned u) RAPIDJSON_NOEXCEPT : data_() { + data_.n.u64 = u; + data_.f.flags = (u & 0x80000000) ? kNumberUintFlag : (kNumberUintFlag | kIntFlag | kInt64Flag); + } + + //! Constructor for int64_t value. 
+ explicit GenericValue(int64_t i64) RAPIDJSON_NOEXCEPT : data_() { + data_.n.i64 = i64; + data_.f.flags = kNumberInt64Flag; + if (i64 >= 0) { + data_.f.flags |= kNumberUint64Flag; + if (!(static_cast(i64) & RAPIDJSON_UINT64_C2(0xFFFFFFFF, 0x00000000))) + data_.f.flags |= kUintFlag; + if (!(static_cast(i64) & RAPIDJSON_UINT64_C2(0xFFFFFFFF, 0x80000000))) + data_.f.flags |= kIntFlag; + } + else if (i64 >= static_cast(RAPIDJSON_UINT64_C2(0xFFFFFFFF, 0x80000000))) + data_.f.flags |= kIntFlag; + } + + //! Constructor for uint64_t value. + explicit GenericValue(uint64_t u64) RAPIDJSON_NOEXCEPT : data_() { + data_.n.u64 = u64; + data_.f.flags = kNumberUint64Flag; + if (!(u64 & RAPIDJSON_UINT64_C2(0x80000000, 0x00000000))) + data_.f.flags |= kInt64Flag; + if (!(u64 & RAPIDJSON_UINT64_C2(0xFFFFFFFF, 0x00000000))) + data_.f.flags |= kUintFlag; + if (!(u64 & RAPIDJSON_UINT64_C2(0xFFFFFFFF, 0x80000000))) + data_.f.flags |= kIntFlag; + } + + //! Constructor for double value. + explicit GenericValue(double d) RAPIDJSON_NOEXCEPT : data_() { data_.n.d = d; data_.f.flags = kNumberDoubleFlag; } + + //! Constructor for constant string (i.e. do not make a copy of string) + GenericValue(const Ch* s, SizeType length) RAPIDJSON_NOEXCEPT : data_() { SetStringRaw(StringRef(s, length)); } + + //! Constructor for constant string (i.e. do not make a copy of string) + explicit GenericValue(StringRefType s) RAPIDJSON_NOEXCEPT : data_() { SetStringRaw(s); } + + //! Constructor for copy-string (i.e. do make a copy of string) + GenericValue(const Ch* s, SizeType length, Allocator& allocator) : data_() { SetStringRaw(StringRef(s, length), allocator); } + + //! Constructor for copy-string (i.e. do make a copy of string) + GenericValue(const Ch*s, Allocator& allocator) : data_() { SetStringRaw(StringRef(s), allocator); } + +#if RAPIDJSON_HAS_STDSTRING + //! Constructor for copy-string from a string object (i.e. do make a copy of string) + /*! \note Requires the definition of the preprocessor symbol \ref RAPIDJSON_HAS_STDSTRING. + */ + GenericValue(const std::basic_string& s, Allocator& allocator) : data_() { SetStringRaw(StringRef(s), allocator); } +#endif + + //! Constructor for Array. + /*! + \param a An array obtained by \c GetArray(). + \note \c Array is always pass-by-value. + \note the source array is moved into this value and the sourec array becomes empty. + */ + GenericValue(Array a) RAPIDJSON_NOEXCEPT : data_(a.value_.data_) { + a.value_.data_ = Data(); + a.value_.data_.f.flags = kArrayFlag; + } + + //! Constructor for Object. + /*! + \param o An object obtained by \c GetObject(). + \note \c Object is always pass-by-value. + \note the source object is moved into this value and the sourec object becomes empty. + */ + GenericValue(Object o) RAPIDJSON_NOEXCEPT : data_(o.value_.data_) { + o.value_.data_ = Data(); + o.value_.data_.f.flags = kObjectFlag; + } + + //! Destructor. + /*! Need to destruct elements of array, members of object, or copy-string. 
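+
+        For illustration (assuming \c allocator is a live allocator reference):
+        only copy-strings own a buffer that the destructor has to release.
+        \code
+        Value a("abc", allocator);      // copy-string: freed here when kNeedFree is true
+        Value b(StringRef("abc"));      // constant string reference: nothing to free
+        \endcode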
+ */ + ~GenericValue() { + if (Allocator::kNeedFree) { // Shortcut by Allocator's trait + switch(data_.f.flags) { + case kArrayFlag: + { + GenericValue* e = GetElementsPointer(); + for (GenericValue* v = e; v != e + data_.a.size; ++v) + v->~GenericValue(); + Allocator::Free(e); + } + break; + + case kObjectFlag: + for (MemberIterator m = MemberBegin(); m != MemberEnd(); ++m) + m->~Member(); + Allocator::Free(GetMembersPointer()); + break; + + case kCopyStringFlag: + Allocator::Free(const_cast(GetStringPointer())); + break; + + default: + break; // Do nothing for other types. + } + } + } + + //@} + + //!@name Assignment operators + //@{ + + //! Assignment with move semantics. + /*! \param rhs Source of the assignment. It will become a null value after assignment. + */ + GenericValue& operator=(GenericValue& rhs) RAPIDJSON_NOEXCEPT { + RAPIDJSON_ASSERT(this != &rhs); + this->~GenericValue(); + RawAssign(rhs); + return *this; + } + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + //! Move assignment in C++11 + GenericValue& operator=(GenericValue&& rhs) RAPIDJSON_NOEXCEPT { + return *this = rhs.Move(); + } +#endif + + //! Assignment of constant string reference (no copy) + /*! \param str Constant string reference to be assigned + \note This overload is needed to avoid clashes with the generic primitive type assignment overload below. + \see GenericStringRef, operator=(T) + */ + GenericValue& operator=(StringRefType str) RAPIDJSON_NOEXCEPT { + GenericValue s(str); + return *this = s; + } + + //! Assignment with primitive types. + /*! \tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t + \param value The value to be assigned. + + \note The source type \c T explicitly disallows all pointer types, + especially (\c const) \ref Ch*. This helps avoiding implicitly + referencing character strings with insufficient lifetime, use + \ref SetString(const Ch*, Allocator&) (for copying) or + \ref StringRef() (to explicitly mark the pointer as constant) instead. + All other pointer types would implicitly convert to \c bool, + use \ref SetBool() instead. + */ + template + RAPIDJSON_DISABLEIF_RETURN((internal::IsPointer), (GenericValue&)) + operator=(T value) { + GenericValue v(value); + return *this = v; + } + + //! Deep-copy assignment from Value + /*! Assigns a \b copy of the Value to the current Value object + \tparam SourceAllocator Allocator type of \c rhs + \param rhs Value to copy from (read-only) + \param allocator Allocator to use for copying + */ + template + GenericValue& CopyFrom(const GenericValue& rhs, Allocator& allocator) { + RAPIDJSON_ASSERT(static_cast(this) != static_cast(&rhs)); + this->~GenericValue(); + new (this) GenericValue(rhs, allocator); + return *this; + } + + //! Exchange the contents of this value with those of other. + /*! + \param other Another value. + \note Constant complexity. + */ + GenericValue& Swap(GenericValue& other) RAPIDJSON_NOEXCEPT { + GenericValue temp; + temp.RawAssign(*this); + RawAssign(other); + other.RawAssign(temp); + return *this; + } + + //! free-standing swap function helper + /*! + Helper function to enable support for common swap implementation pattern based on \c std::swap: + \code + void swap(MyClass& a, MyClass& b) { + using std::swap; + swap(a.value, b.value); + // ... + } + \endcode + \see Swap() + */ + friend inline void swap(GenericValue& a, GenericValue& b) RAPIDJSON_NOEXCEPT { a.Swap(b); } + + //! Prepare Value for move semantics + /*! 
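+        A minimal sketch, assuming \c target is an array Value and \c allocator its allocator:
+        \code
+        Value v(kArrayType);
+        target.PushBack(v.Move(), allocator);   // transfers contents, v becomes null
+        \endcode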
\return *this */ + GenericValue& Move() RAPIDJSON_NOEXCEPT { return *this; } + //@} + + //!@name Equal-to and not-equal-to operators + //@{ + //! Equal-to operator + /*! + \note If an object contains duplicated named member, comparing equality with any object is always \c false. + \note Linear time complexity (number of all values in the subtree and total lengths of all strings). + */ + template + bool operator==(const GenericValue& rhs) const { + typedef GenericValue RhsType; + if (GetType() != rhs.GetType()) + return false; + + switch (GetType()) { + case kObjectType: // Warning: O(n^2) inner-loop + if (data_.o.size != rhs.data_.o.size) + return false; + for (ConstMemberIterator lhsMemberItr = MemberBegin(); lhsMemberItr != MemberEnd(); ++lhsMemberItr) { + typename RhsType::ConstMemberIterator rhsMemberItr = rhs.FindMember(lhsMemberItr->name); + if (rhsMemberItr == rhs.MemberEnd() || lhsMemberItr->value != rhsMemberItr->value) + return false; + } + return true; + + case kArrayType: + if (data_.a.size != rhs.data_.a.size) + return false; + for (SizeType i = 0; i < data_.a.size; i++) + if ((*this)[i] != rhs[i]) + return false; + return true; + + case kStringType: + return StringEqual(rhs); + + case kNumberType: + if (IsDouble() || rhs.IsDouble()) { + double a = GetDouble(); // May convert from integer to double. + double b = rhs.GetDouble(); // Ditto + return a >= b && a <= b; // Prevent -Wfloat-equal + } + else + return data_.n.u64 == rhs.data_.n.u64; + + default: + return true; + } + } + + //! Equal-to operator with const C-string pointer + bool operator==(const Ch* rhs) const { return *this == GenericValue(StringRef(rhs)); } + +#if RAPIDJSON_HAS_STDSTRING + //! Equal-to operator with string object + /*! \note Requires the definition of the preprocessor symbol \ref RAPIDJSON_HAS_STDSTRING. + */ + bool operator==(const std::basic_string& rhs) const { return *this == GenericValue(StringRef(rhs)); } +#endif + + //! Equal-to operator with primitive types + /*! \tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t, \c double, \c true, \c false + */ + template RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr,internal::IsGenericValue >), (bool)) operator==(const T& rhs) const { return *this == GenericValue(rhs); } + + //! Not-equal-to operator + /*! \return !(*this == rhs) + */ + template + bool operator!=(const GenericValue& rhs) const { return !(*this == rhs); } + + //! Not-equal-to operator with const C-string pointer + bool operator!=(const Ch* rhs) const { return !(*this == rhs); } + + //! Not-equal-to operator with arbitrary types + /*! \return !(*this == rhs) + */ + template RAPIDJSON_DISABLEIF_RETURN((internal::IsGenericValue), (bool)) operator!=(const T& rhs) const { return !(*this == rhs); } + + //! Equal-to operator with arbitrary types (symmetric version) + /*! \return (rhs == lhs) + */ + template friend RAPIDJSON_DISABLEIF_RETURN((internal::IsGenericValue), (bool)) operator==(const T& lhs, const GenericValue& rhs) { return rhs == lhs; } + + //! Not-Equal-to operator with arbitrary types (symmetric version) + /*! 
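+        Illustrative sketch of the symmetric comparison overloads:
+        \code
+        Value v(42);
+        bool eq = (v == 42);    // member operator==
+        bool ne = (42 != v);    // this free-function overload
+        \endcode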
\return !(rhs == lhs) + */ + template friend RAPIDJSON_DISABLEIF_RETURN((internal::IsGenericValue), (bool)) operator!=(const T& lhs, const GenericValue& rhs) { return !(rhs == lhs); } + //@} + + //!@name Type + //@{ + + Type GetType() const { return static_cast(data_.f.flags & kTypeMask); } + bool IsNull() const { return data_.f.flags == kNullFlag; } + bool IsFalse() const { return data_.f.flags == kFalseFlag; } + bool IsTrue() const { return data_.f.flags == kTrueFlag; } + bool IsBool() const { return (data_.f.flags & kBoolFlag) != 0; } + bool IsObject() const { return data_.f.flags == kObjectFlag; } + bool IsArray() const { return data_.f.flags == kArrayFlag; } + bool IsNumber() const { return (data_.f.flags & kNumberFlag) != 0; } + bool IsInt() const { return (data_.f.flags & kIntFlag) != 0; } + bool IsUint() const { return (data_.f.flags & kUintFlag) != 0; } + bool IsInt64() const { return (data_.f.flags & kInt64Flag) != 0; } + bool IsUint64() const { return (data_.f.flags & kUint64Flag) != 0; } + bool IsDouble() const { return (data_.f.flags & kDoubleFlag) != 0; } + bool IsString() const { return (data_.f.flags & kStringFlag) != 0; } + + // Checks whether a number can be losslessly converted to a double. + bool IsLosslessDouble() const { + if (!IsNumber()) return false; + if (IsUint64()) { + uint64_t u = GetUint64(); + volatile double d = static_cast(u); + return (d >= 0.0) + && (d < static_cast(std::numeric_limits::max())) + && (u == static_cast(d)); + } + if (IsInt64()) { + int64_t i = GetInt64(); + volatile double d = static_cast(i); + return (d >= static_cast(std::numeric_limits::min())) + && (d < static_cast(std::numeric_limits::max())) + && (i == static_cast(d)); + } + return true; // double, int, uint are always lossless + } + + // Checks whether a number is a float (possible lossy). + bool IsFloat() const { + if ((data_.f.flags & kDoubleFlag) == 0) + return false; + double d = GetDouble(); + return d >= -3.4028234e38 && d <= 3.4028234e38; + } + // Checks whether a number can be losslessly converted to a float. + bool IsLosslessFloat() const { + if (!IsNumber()) return false; + double a = GetDouble(); + if (a < static_cast(-std::numeric_limits::max()) + || a > static_cast(std::numeric_limits::max())) + return false; + double b = static_cast(static_cast(a)); + return a >= b && a <= b; // Prevent -Wfloat-equal + } + + //@} + + //!@name Null + //@{ + + GenericValue& SetNull() { this->~GenericValue(); new (this) GenericValue(); return *this; } + + //@} + + //!@name Bool + //@{ + + bool GetBool() const { RAPIDJSON_ASSERT(IsBool()); return data_.f.flags == kTrueFlag; } + //!< Set boolean value + /*! \post IsBool() == true */ + GenericValue& SetBool(bool b) { this->~GenericValue(); new (this) GenericValue(b); return *this; } + + //@} + + //!@name Object + //@{ + + //! Set this value as an empty object. + /*! \post IsObject() == true */ + GenericValue& SetObject() { this->~GenericValue(); new (this) GenericValue(kObjectType); return *this; } + + //! Get the number of members in the object. + SizeType MemberCount() const { RAPIDJSON_ASSERT(IsObject()); return data_.o.size; } + + //! Check whether the object is empty. + bool ObjectEmpty() const { RAPIDJSON_ASSERT(IsObject()); return data_.o.size == 0; } + + //! Get a value from an object associated with the name. + /*! \pre IsObject() == true + \tparam T Either \c Ch or \c const \c Ch (template used for disambiguation with \ref operator[](SizeType)) + \note In version 0.1x, if the member is not found, this function returns a null value. 
This makes issue 7. + Since 0.2, if the name is not correct, it will assert. + If user is unsure whether a member exists, user should use HasMember() first. + A better approach is to use FindMember(). + \note Linear time complexity. + */ + template + RAPIDJSON_DISABLEIF_RETURN((internal::NotExpr::Type, Ch> >),(GenericValue&)) operator[](T* name) { + GenericValue n(StringRef(name)); + return (*this)[n]; + } + template + RAPIDJSON_DISABLEIF_RETURN((internal::NotExpr::Type, Ch> >),(const GenericValue&)) operator[](T* name) const { return const_cast(*this)[name]; } + + //! Get a value from an object associated with the name. + /*! \pre IsObject() == true + \tparam SourceAllocator Allocator of the \c name value + + \note Compared to \ref operator[](T*), this version is faster because it does not need a StrLen(). + And it can also handle strings with embedded null characters. + + \note Linear time complexity. + */ + template + GenericValue& operator[](const GenericValue& name) { + MemberIterator member = FindMember(name); + if (member != MemberEnd()) + return member->value; + else { + RAPIDJSON_ASSERT(false); // see above note + + // This will generate -Wexit-time-destructors in clang + // static GenericValue NullValue; + // return NullValue; + + // Use static buffer and placement-new to prevent destruction + static char buffer[sizeof(GenericValue)]; + return *new (buffer) GenericValue(); + } + } + template + const GenericValue& operator[](const GenericValue& name) const { return const_cast(*this)[name]; } + +#if RAPIDJSON_HAS_STDSTRING + //! Get a value from an object associated with name (string object). + GenericValue& operator[](const std::basic_string& name) { return (*this)[GenericValue(StringRef(name))]; } + const GenericValue& operator[](const std::basic_string& name) const { return (*this)[GenericValue(StringRef(name))]; } +#endif + + //! Const member iterator + /*! \pre IsObject() == true */ + ConstMemberIterator MemberBegin() const { RAPIDJSON_ASSERT(IsObject()); return ConstMemberIterator(GetMembersPointer()); } + //! Const \em past-the-end member iterator + /*! \pre IsObject() == true */ + ConstMemberIterator MemberEnd() const { RAPIDJSON_ASSERT(IsObject()); return ConstMemberIterator(GetMembersPointer() + data_.o.size); } + //! Member iterator + /*! \pre IsObject() == true */ + MemberIterator MemberBegin() { RAPIDJSON_ASSERT(IsObject()); return MemberIterator(GetMembersPointer()); } + //! \em Past-the-end member iterator + /*! \pre IsObject() == true */ + MemberIterator MemberEnd() { RAPIDJSON_ASSERT(IsObject()); return MemberIterator(GetMembersPointer() + data_.o.size); } + + //! Check whether a member exists in the object. + /*! + \param name Member name to be searched. + \pre IsObject() == true + \return Whether a member with that name exists. + \note It is better to use FindMember() directly if you need the obtain the value as well. + \note Linear time complexity. + */ + bool HasMember(const Ch* name) const { return FindMember(name) != MemberEnd(); } + +#if RAPIDJSON_HAS_STDSTRING + //! Check whether a member exists in the object with string object. + /*! + \param name Member name to be searched. + \pre IsObject() == true + \return Whether a member with that name exists. + \note It is better to use FindMember() directly if you need the obtain the value as well. + \note Linear time complexity. + */ + bool HasMember(const std::basic_string& name) const { return FindMember(name) != MemberEnd(); } +#endif + + //! Check whether a member exists in the object with GenericValue name. 
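+
+    // Illustrative usage sketch; "doc" is an assumed Document whose root is
+    // already a JSON object:
+    //
+    //     bool present = doc.HasMember("name");
+    //     Value::ConstMemberIterator it = doc.FindMember("name");
+    //     if (it != doc.MemberEnd()) {
+    //         const Value& found = it->value;   // use the member's value
+    //     }
+    //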
+ /*! + This version is faster because it does not need a StrLen(). It can also handle string with null character. + \param name Member name to be searched. + \pre IsObject() == true + \return Whether a member with that name exists. + \note It is better to use FindMember() directly if you need the obtain the value as well. + \note Linear time complexity. + */ + template + bool HasMember(const GenericValue& name) const { return FindMember(name) != MemberEnd(); } + + //! Find member by name. + /*! + \param name Member name to be searched. + \pre IsObject() == true + \return Iterator to member, if it exists. + Otherwise returns \ref MemberEnd(). + + \note Earlier versions of Rapidjson returned a \c NULL pointer, in case + the requested member doesn't exist. For consistency with e.g. + \c std::map, this has been changed to MemberEnd() now. + \note Linear time complexity. + */ + MemberIterator FindMember(const Ch* name) { + GenericValue n(StringRef(name)); + return FindMember(n); + } + + ConstMemberIterator FindMember(const Ch* name) const { return const_cast(*this).FindMember(name); } + + //! Find member by name. + /*! + This version is faster because it does not need a StrLen(). It can also handle string with null character. + \param name Member name to be searched. + \pre IsObject() == true + \return Iterator to member, if it exists. + Otherwise returns \ref MemberEnd(). + + \note Earlier versions of Rapidjson returned a \c NULL pointer, in case + the requested member doesn't exist. For consistency with e.g. + \c std::map, this has been changed to MemberEnd() now. + \note Linear time complexity. + */ + template + MemberIterator FindMember(const GenericValue& name) { + RAPIDJSON_ASSERT(IsObject()); + RAPIDJSON_ASSERT(name.IsString()); + MemberIterator member = MemberBegin(); + for ( ; member != MemberEnd(); ++member) + if (name.StringEqual(member->name)) + break; + return member; + } + template ConstMemberIterator FindMember(const GenericValue& name) const { return const_cast(*this).FindMember(name); } + +#if RAPIDJSON_HAS_STDSTRING + //! Find member by string object name. + /*! + \param name Member name to be searched. + \pre IsObject() == true + \return Iterator to member, if it exists. + Otherwise returns \ref MemberEnd(). + */ + MemberIterator FindMember(const std::basic_string& name) { return FindMember(GenericValue(StringRef(name))); } + ConstMemberIterator FindMember(const std::basic_string& name) const { return FindMember(GenericValue(StringRef(name))); } +#endif + + //! Add a member (name-value pair) to the object. + /*! \param name A string value as name of member. + \param value Value of any type. + \param allocator Allocator for reallocating memory. It must be the same one as used before. Commonly use GenericDocument::GetAllocator(). + \return The value itself for fluent API. + \note The ownership of \c name and \c value will be transferred to this object on success. + \pre IsObject() && name.IsString() + \post name.IsNull() && value.IsNull() + \note Amortized Constant time complexity. 
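+
+        A minimal sketch, assuming \c d is a Document whose root is an object:
+        \code
+        Document::AllocatorType& a = d.GetAllocator();
+        Value key("answer", a);        // copy-string name
+        Value val(42);
+        d.AddMember(key, val, a);      // key and val are null afterwards
+        \endcode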
+ */ + GenericValue& AddMember(GenericValue& name, GenericValue& value, Allocator& allocator) { + RAPIDJSON_ASSERT(IsObject()); + RAPIDJSON_ASSERT(name.IsString()); + + ObjectData& o = data_.o; + if (o.size >= o.capacity) { + if (o.capacity == 0) { + o.capacity = kDefaultObjectCapacity; + SetMembersPointer(reinterpret_cast(allocator.Malloc(o.capacity * sizeof(Member)))); + } + else { + SizeType oldCapacity = o.capacity; + o.capacity += (oldCapacity + 1) / 2; // grow by factor 1.5 + SetMembersPointer(reinterpret_cast(allocator.Realloc(GetMembersPointer(), oldCapacity * sizeof(Member), o.capacity * sizeof(Member)))); + } + } + Member* members = GetMembersPointer(); + members[o.size].name.RawAssign(name); + members[o.size].value.RawAssign(value); + o.size++; + return *this; + } + + //! Add a constant string value as member (name-value pair) to the object. + /*! \param name A string value as name of member. + \param value constant string reference as value of member. + \param allocator Allocator for reallocating memory. It must be the same one as used before. Commonly use GenericDocument::GetAllocator(). + \return The value itself for fluent API. + \pre IsObject() + \note This overload is needed to avoid clashes with the generic primitive type AddMember(GenericValue&,T,Allocator&) overload below. + \note Amortized Constant time complexity. + */ + GenericValue& AddMember(GenericValue& name, StringRefType value, Allocator& allocator) { + GenericValue v(value); + return AddMember(name, v, allocator); + } + +#if RAPIDJSON_HAS_STDSTRING + //! Add a string object as member (name-value pair) to the object. + /*! \param name A string value as name of member. + \param value constant string reference as value of member. + \param allocator Allocator for reallocating memory. It must be the same one as used before. Commonly use GenericDocument::GetAllocator(). + \return The value itself for fluent API. + \pre IsObject() + \note This overload is needed to avoid clashes with the generic primitive type AddMember(GenericValue&,T,Allocator&) overload below. + \note Amortized Constant time complexity. + */ + GenericValue& AddMember(GenericValue& name, std::basic_string& value, Allocator& allocator) { + GenericValue v(value, allocator); + return AddMember(name, v, allocator); + } +#endif + + //! Add any primitive value as member (name-value pair) to the object. + /*! \tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t + \param name A string value as name of member. + \param value Value of primitive type \c T as value of member + \param allocator Allocator for reallocating memory. Commonly use GenericDocument::GetAllocator(). + \return The value itself for fluent API. + \pre IsObject() + + \note The source type \c T explicitly disallows all pointer types, + especially (\c const) \ref Ch*. This helps avoiding implicitly + referencing character strings with insufficient lifetime, use + \ref AddMember(StringRefType, GenericValue&, Allocator&) or \ref + AddMember(StringRefType, StringRefType, Allocator&). + All other pointer types would implicitly convert to \c bool, + use an explicit cast instead, if needed. + \note Amortized Constant time complexity. 
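+
+        Illustrative sketch (assuming \c obj is an object Value and \c a its allocator):
+        \code
+        Value key("count", a);
+        obj.AddMember(key, 7, a);                          // primitive value
+        Value key2("label", a);
+        obj.AddMember(key2, StringRef("static text"), a);  // strings go through the StringRefType overload
+        \endcode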
+ */ + template + RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (GenericValue&)) + AddMember(GenericValue& name, T value, Allocator& allocator) { + GenericValue v(value); + return AddMember(name, v, allocator); + } + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + GenericValue& AddMember(GenericValue&& name, GenericValue&& value, Allocator& allocator) { + return AddMember(name, value, allocator); + } + GenericValue& AddMember(GenericValue&& name, GenericValue& value, Allocator& allocator) { + return AddMember(name, value, allocator); + } + GenericValue& AddMember(GenericValue& name, GenericValue&& value, Allocator& allocator) { + return AddMember(name, value, allocator); + } + GenericValue& AddMember(StringRefType name, GenericValue&& value, Allocator& allocator) { + GenericValue n(name); + return AddMember(n, value, allocator); + } +#endif // RAPIDJSON_HAS_CXX11_RVALUE_REFS + + + //! Add a member (name-value pair) to the object. + /*! \param name A constant string reference as name of member. + \param value Value of any type. + \param allocator Allocator for reallocating memory. It must be the same one as used before. Commonly use GenericDocument::GetAllocator(). + \return The value itself for fluent API. + \note The ownership of \c value will be transferred to this object on success. + \pre IsObject() + \post value.IsNull() + \note Amortized Constant time complexity. + */ + GenericValue& AddMember(StringRefType name, GenericValue& value, Allocator& allocator) { + GenericValue n(name); + return AddMember(n, value, allocator); + } + + //! Add a constant string value as member (name-value pair) to the object. + /*! \param name A constant string reference as name of member. + \param value constant string reference as value of member. + \param allocator Allocator for reallocating memory. It must be the same one as used before. Commonly use GenericDocument::GetAllocator(). + \return The value itself for fluent API. + \pre IsObject() + \note This overload is needed to avoid clashes with the generic primitive type AddMember(StringRefType,T,Allocator&) overload below. + \note Amortized Constant time complexity. + */ + GenericValue& AddMember(StringRefType name, StringRefType value, Allocator& allocator) { + GenericValue v(value); + return AddMember(name, v, allocator); + } + + //! Add any primitive value as member (name-value pair) to the object. + /*! \tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t + \param name A constant string reference as name of member. + \param value Value of primitive type \c T as value of member + \param allocator Allocator for reallocating memory. Commonly use GenericDocument::GetAllocator(). + \return The value itself for fluent API. + \pre IsObject() + + \note The source type \c T explicitly disallows all pointer types, + especially (\c const) \ref Ch*. This helps avoiding implicitly + referencing character strings with insufficient lifetime, use + \ref AddMember(StringRefType, GenericValue&, Allocator&) or \ref + AddMember(StringRefType, StringRefType, Allocator&). + All other pointer types would implicitly convert to \c bool, + use an explicit cast instead, if needed. + \note Amortized Constant time complexity. + */ + template + RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (GenericValue&)) + AddMember(StringRefType name, T value, Allocator& allocator) { + GenericValue n(name); + return AddMember(n, value, allocator); + } + + //! Remove all members in the object. + /*! 
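+        A minimal sketch, assuming \c obj is an object Value and \c a its allocator:
+        \code
+        obj.RemoveAllMembers();        // size becomes 0, capacity is kept
+        obj.AddMember("fresh", 1, a);  // string-literal name via StringRefType
+        \endcode
+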
This function do not deallocate memory in the object, i.e. the capacity is unchanged. + \note Linear time complexity. + */ + void RemoveAllMembers() { + RAPIDJSON_ASSERT(IsObject()); + for (MemberIterator m = MemberBegin(); m != MemberEnd(); ++m) + m->~Member(); + data_.o.size = 0; + } + + //! Remove a member in object by its name. + /*! \param name Name of member to be removed. + \return Whether the member existed. + \note This function may reorder the object members. Use \ref + EraseMember(ConstMemberIterator) if you need to preserve the + relative order of the remaining members. + \note Linear time complexity. + */ + bool RemoveMember(const Ch* name) { + GenericValue n(StringRef(name)); + return RemoveMember(n); + } + +#if RAPIDJSON_HAS_STDSTRING + bool RemoveMember(const std::basic_string& name) { return RemoveMember(GenericValue(StringRef(name))); } +#endif + + template + bool RemoveMember(const GenericValue& name) { + MemberIterator m = FindMember(name); + if (m != MemberEnd()) { + RemoveMember(m); + return true; + } + else + return false; + } + + //! Remove a member in object by iterator. + /*! \param m member iterator (obtained by FindMember() or MemberBegin()). + \return the new iterator after removal. + \note This function may reorder the object members. Use \ref + EraseMember(ConstMemberIterator) if you need to preserve the + relative order of the remaining members. + \note Constant time complexity. + */ + MemberIterator RemoveMember(MemberIterator m) { + RAPIDJSON_ASSERT(IsObject()); + RAPIDJSON_ASSERT(data_.o.size > 0); + RAPIDJSON_ASSERT(GetMembersPointer() != 0); + RAPIDJSON_ASSERT(m >= MemberBegin() && m < MemberEnd()); + + MemberIterator last(GetMembersPointer() + (data_.o.size - 1)); + if (data_.o.size > 1 && m != last) + *m = *last; // Move the last one to this place + else + m->~Member(); // Only one left, just destroy + --data_.o.size; + return m; + } + + //! Remove a member from an object by iterator. + /*! \param pos iterator to the member to remove + \pre IsObject() == true && \ref MemberBegin() <= \c pos < \ref MemberEnd() + \return Iterator following the removed element. + If the iterator \c pos refers to the last element, the \ref MemberEnd() iterator is returned. + \note This function preserves the relative order of the remaining object + members. If you do not need this, use the more efficient \ref RemoveMember(MemberIterator). + \note Linear time complexity. + */ + MemberIterator EraseMember(ConstMemberIterator pos) { + return EraseMember(pos, pos +1); + } + + //! Remove members in the range [first, last) from an object. + /*! \param first iterator to the first member to remove + \param last iterator following the last member to remove + \pre IsObject() == true && \ref MemberBegin() <= \c first <= \c last <= \ref MemberEnd() + \return Iterator following the last removed element. + \note This function preserves the relative order of the remaining object + members. + \note Linear time complexity. 
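+
+        Illustrative sketch, assuming \c obj is an object Value:
+        \code
+        Value::MemberIterator it = obj.FindMember("obsolete");
+        if (it != obj.MemberEnd())
+            obj.EraseMember(it);       // order-preserving removal
+        \endcode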
+ */ + MemberIterator EraseMember(ConstMemberIterator first, ConstMemberIterator last) { + RAPIDJSON_ASSERT(IsObject()); + RAPIDJSON_ASSERT(data_.o.size > 0); + RAPIDJSON_ASSERT(GetMembersPointer() != 0); + RAPIDJSON_ASSERT(first >= MemberBegin()); + RAPIDJSON_ASSERT(first <= last); + RAPIDJSON_ASSERT(last <= MemberEnd()); + + MemberIterator pos = MemberBegin() + (first - MemberBegin()); + for (MemberIterator itr = pos; itr != last; ++itr) + itr->~Member(); + std::memmove(&*pos, &*last, static_cast(MemberEnd() - last) * sizeof(Member)); + data_.o.size -= static_cast(last - first); + return pos; + } + + //! Erase a member in object by its name. + /*! \param name Name of member to be removed. + \return Whether the member existed. + \note Linear time complexity. + */ + bool EraseMember(const Ch* name) { + GenericValue n(StringRef(name)); + return EraseMember(n); + } + +#if RAPIDJSON_HAS_STDSTRING + bool EraseMember(const std::basic_string& name) { return EraseMember(GenericValue(StringRef(name))); } +#endif + + template + bool EraseMember(const GenericValue& name) { + MemberIterator m = FindMember(name); + if (m != MemberEnd()) { + EraseMember(m); + return true; + } + else + return false; + } + + Object GetObject() { RAPIDJSON_ASSERT(IsObject()); return Object(*this); } + ConstObject GetObject() const { RAPIDJSON_ASSERT(IsObject()); return ConstObject(*this); } + + //@} + + //!@name Array + //@{ + + //! Set this value as an empty array. + /*! \post IsArray == true */ + GenericValue& SetArray() { this->~GenericValue(); new (this) GenericValue(kArrayType); return *this; } + + //! Get the number of elements in array. + SizeType Size() const { RAPIDJSON_ASSERT(IsArray()); return data_.a.size; } + + //! Get the capacity of array. + SizeType Capacity() const { RAPIDJSON_ASSERT(IsArray()); return data_.a.capacity; } + + //! Check whether the array is empty. + bool Empty() const { RAPIDJSON_ASSERT(IsArray()); return data_.a.size == 0; } + + //! Remove all elements in the array. + /*! This function do not deallocate memory in the array, i.e. the capacity is unchanged. + \note Linear time complexity. + */ + void Clear() { + RAPIDJSON_ASSERT(IsArray()); + GenericValue* e = GetElementsPointer(); + for (GenericValue* v = e; v != e + data_.a.size; ++v) + v->~GenericValue(); + data_.a.size = 0; + } + + //! Get an element from array by index. + /*! \pre IsArray() == true + \param index Zero-based index of element. + \see operator[](T*) + */ + GenericValue& operator[](SizeType index) { + RAPIDJSON_ASSERT(IsArray()); + RAPIDJSON_ASSERT(index < data_.a.size); + return GetElementsPointer()[index]; + } + const GenericValue& operator[](SizeType index) const { return const_cast(*this)[index]; } + + //! Element iterator + /*! \pre IsArray() == true */ + ValueIterator Begin() { RAPIDJSON_ASSERT(IsArray()); return GetElementsPointer(); } + //! \em Past-the-end element iterator + /*! \pre IsArray() == true */ + ValueIterator End() { RAPIDJSON_ASSERT(IsArray()); return GetElementsPointer() + data_.a.size; } + //! Constant element iterator + /*! \pre IsArray() == true */ + ConstValueIterator Begin() const { return const_cast(*this).Begin(); } + //! Constant \em past-the-end element iterator + /*! \pre IsArray() == true */ + ConstValueIterator End() const { return const_cast(*this).End(); } + + //! Request the array to have enough capacity to store elements. + /*! \param newCapacity The capacity that the array at least need to have. + \param allocator Allocator for reallocating memory. 
It must be the same one as used before. Commonly use GenericDocument::GetAllocator(). + \return The value itself for fluent API. + \note Linear time complexity. + */ + GenericValue& Reserve(SizeType newCapacity, Allocator &allocator) { + RAPIDJSON_ASSERT(IsArray()); + if (newCapacity > data_.a.capacity) { + SetElementsPointer(reinterpret_cast(allocator.Realloc(GetElementsPointer(), data_.a.capacity * sizeof(GenericValue), newCapacity * sizeof(GenericValue)))); + data_.a.capacity = newCapacity; + } + return *this; + } + + //! Append a GenericValue at the end of the array. + /*! \param value Value to be appended. + \param allocator Allocator for reallocating memory. It must be the same one as used before. Commonly use GenericDocument::GetAllocator(). + \pre IsArray() == true + \post value.IsNull() == true + \return The value itself for fluent API. + \note The ownership of \c value will be transferred to this array on success. + \note If the number of elements to be appended is known, calls Reserve() once first may be more efficient. + \note Amortized constant time complexity. + */ + GenericValue& PushBack(GenericValue& value, Allocator& allocator) { + RAPIDJSON_ASSERT(IsArray()); + if (data_.a.size >= data_.a.capacity) + Reserve(data_.a.capacity == 0 ? kDefaultArrayCapacity : (data_.a.capacity + (data_.a.capacity + 1) / 2), allocator); + GetElementsPointer()[data_.a.size++].RawAssign(value); + return *this; + } + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + GenericValue& PushBack(GenericValue&& value, Allocator& allocator) { + return PushBack(value, allocator); + } +#endif // RAPIDJSON_HAS_CXX11_RVALUE_REFS + + //! Append a constant string reference at the end of the array. + /*! \param value Constant string reference to be appended. + \param allocator Allocator for reallocating memory. It must be the same one used previously. Commonly use GenericDocument::GetAllocator(). + \pre IsArray() == true + \return The value itself for fluent API. + \note If the number of elements to be appended is known, calls Reserve() once first may be more efficient. + \note Amortized constant time complexity. + \see GenericStringRef + */ + GenericValue& PushBack(StringRefType value, Allocator& allocator) { + return (*this).template PushBack(value, allocator); + } + + //! Append a primitive value at the end of the array. + /*! \tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t + \param value Value of primitive type T to be appended. + \param allocator Allocator for reallocating memory. It must be the same one as used before. Commonly use GenericDocument::GetAllocator(). + \pre IsArray() == true + \return The value itself for fluent API. + \note If the number of elements to be appended is known, calls Reserve() once first may be more efficient. + + \note The source type \c T explicitly disallows all pointer types, + especially (\c const) \ref Ch*. This helps avoiding implicitly + referencing character strings with insufficient lifetime, use + \ref PushBack(GenericValue&, Allocator&) or \ref + PushBack(StringRefType, Allocator&). + All other pointer types would implicitly convert to \c bool, + use an explicit cast instead, if needed. + \note Amortized constant time complexity. + */ + template + RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (GenericValue&)) + PushBack(T value, Allocator& allocator) { + GenericValue v(value); + return PushBack(v, allocator); + } + + //! Remove the last element in the array. + /*! + \note Constant time complexity. 
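+        A minimal sketch of building and trimming an array (illustrative; names are hypothetical):
+        \code
+        Document doc(kArrayType);
+        Document::AllocatorType& a = doc.GetAllocator();
+        doc.Reserve(3, a);                                            // one allocation for the pushes below
+        doc.PushBack(1, a).PushBack(StringRef("two"), a).PushBack(3.0, a);
+        doc.PopBack();                                                // removes the 3.0 again
+        \endcode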
+ */ + GenericValue& PopBack() { + RAPIDJSON_ASSERT(IsArray()); + RAPIDJSON_ASSERT(!Empty()); + GetElementsPointer()[--data_.a.size].~GenericValue(); + return *this; + } + + //! Remove an element of array by iterator. + /*! + \param pos iterator to the element to remove + \pre IsArray() == true && \ref Begin() <= \c pos < \ref End() + \return Iterator following the removed element. If the iterator pos refers to the last element, the End() iterator is returned. + \note Linear time complexity. + */ + ValueIterator Erase(ConstValueIterator pos) { + return Erase(pos, pos + 1); + } + + //! Remove elements in the range [first, last) of the array. + /*! + \param first iterator to the first element to remove + \param last iterator following the last element to remove + \pre IsArray() == true && \ref Begin() <= \c first <= \c last <= \ref End() + \return Iterator following the last removed element. + \note Linear time complexity. + */ + ValueIterator Erase(ConstValueIterator first, ConstValueIterator last) { + RAPIDJSON_ASSERT(IsArray()); + RAPIDJSON_ASSERT(data_.a.size > 0); + RAPIDJSON_ASSERT(GetElementsPointer() != 0); + RAPIDJSON_ASSERT(first >= Begin()); + RAPIDJSON_ASSERT(first <= last); + RAPIDJSON_ASSERT(last <= End()); + ValueIterator pos = Begin() + (first - Begin()); + for (ValueIterator itr = pos; itr != last; ++itr) + itr->~GenericValue(); + std::memmove(pos, last, static_cast(End() - last) * sizeof(GenericValue)); + data_.a.size -= static_cast(last - first); + return pos; + } + + Array GetArray() { RAPIDJSON_ASSERT(IsArray()); return Array(*this); } + ConstArray GetArray() const { RAPIDJSON_ASSERT(IsArray()); return ConstArray(*this); } + + //@} + + //!@name Number + //@{ + + int GetInt() const { RAPIDJSON_ASSERT(data_.f.flags & kIntFlag); return data_.n.i.i; } + unsigned GetUint() const { RAPIDJSON_ASSERT(data_.f.flags & kUintFlag); return data_.n.u.u; } + int64_t GetInt64() const { RAPIDJSON_ASSERT(data_.f.flags & kInt64Flag); return data_.n.i64; } + uint64_t GetUint64() const { RAPIDJSON_ASSERT(data_.f.flags & kUint64Flag); return data_.n.u64; } + + //! Get the value as double type. + /*! \note If the value is 64-bit integer type, it may lose precision. Use \c IsLosslessDouble() to check whether the converison is lossless. + */ + double GetDouble() const { + RAPIDJSON_ASSERT(IsNumber()); + if ((data_.f.flags & kDoubleFlag) != 0) return data_.n.d; // exact type, no conversion. + if ((data_.f.flags & kIntFlag) != 0) return data_.n.i.i; // int -> double + if ((data_.f.flags & kUintFlag) != 0) return data_.n.u.u; // unsigned -> double + if ((data_.f.flags & kInt64Flag) != 0) return static_cast(data_.n.i64); // int64_t -> double (may lose precision) + RAPIDJSON_ASSERT((data_.f.flags & kUint64Flag) != 0); return static_cast(data_.n.u64); // uint64_t -> double (may lose precision) + } + + //! Get the value as float type. + /*! \note If the value is 64-bit integer type, it may lose precision. Use \c IsLosslessFloat() to check whether the converison is lossless. 
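+        A small sketch (illustrative):
+        \code
+        Value v;
+        v.SetDouble(0.1);                      // 0.1 has no exact float representation
+        bool lossless = v.IsLosslessFloat();   // false: the value changes when squeezed through float
+        float f = v.GetFloat();                // converts anyway, with a small rounding error
+        \endcode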
+ */ + float GetFloat() const { + return static_cast(GetDouble()); + } + + GenericValue& SetInt(int i) { this->~GenericValue(); new (this) GenericValue(i); return *this; } + GenericValue& SetUint(unsigned u) { this->~GenericValue(); new (this) GenericValue(u); return *this; } + GenericValue& SetInt64(int64_t i64) { this->~GenericValue(); new (this) GenericValue(i64); return *this; } + GenericValue& SetUint64(uint64_t u64) { this->~GenericValue(); new (this) GenericValue(u64); return *this; } + GenericValue& SetDouble(double d) { this->~GenericValue(); new (this) GenericValue(d); return *this; } + GenericValue& SetFloat(float f) { this->~GenericValue(); new (this) GenericValue(f); return *this; } + + //@} + + //!@name String + //@{ + + const Ch* GetString() const { RAPIDJSON_ASSERT(IsString()); return (data_.f.flags & kInlineStrFlag) ? data_.ss.str : GetStringPointer(); } + + //! Get the length of string. + /*! Since rapidjson permits "\\u0000" in the json string, strlen(v.GetString()) may not equal to v.GetStringLength(). + */ + SizeType GetStringLength() const { RAPIDJSON_ASSERT(IsString()); return ((data_.f.flags & kInlineStrFlag) ? (data_.ss.GetLength()) : data_.s.length); } + + //! Set this value as a string without copying source string. + /*! This version has better performance with supplied length, and also support string containing null character. + \param s source string pointer. + \param length The length of source string, excluding the trailing null terminator. + \return The value itself for fluent API. + \post IsString() == true && GetString() == s && GetStringLength() == length + \see SetString(StringRefType) + */ + GenericValue& SetString(const Ch* s, SizeType length) { return SetString(StringRef(s, length)); } + + //! Set this value as a string without copying source string. + /*! \param s source string reference + \return The value itself for fluent API. + \post IsString() == true && GetString() == s && GetStringLength() == s.length + */ + GenericValue& SetString(StringRefType s) { this->~GenericValue(); SetStringRaw(s); return *this; } + + //! Set this value as a string by copying from source string. + /*! This version has better performance with supplied length, and also support string containing null character. + \param s source string. + \param length The length of source string, excluding the trailing null terminator. + \param allocator Allocator for allocating copied buffer. Commonly use GenericDocument::GetAllocator(). + \return The value itself for fluent API. + \post IsString() == true && GetString() != s && strcmp(GetString(),s) == 0 && GetStringLength() == length + */ + GenericValue& SetString(const Ch* s, SizeType length, Allocator& allocator) { this->~GenericValue(); SetStringRaw(StringRef(s, length), allocator); return *this; } + + //! Set this value as a string by copying from source string. + /*! \param s source string. + \param allocator Allocator for allocating copied buffer. Commonly use GenericDocument::GetAllocator(). + \return The value itself for fluent API. + \post IsString() == true && GetString() != s && strcmp(GetString(),s) == 0 && GetStringLength() == length + */ + GenericValue& SetString(const Ch* s, Allocator& allocator) { return SetString(s, internal::StrLen(s), allocator); } + +#if RAPIDJSON_HAS_STDSTRING + //! Set this value as a string by copying from source string. + /*! \param s source string. + \param allocator Allocator for allocating copied buffer. Commonly use GenericDocument::GetAllocator(). 
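+        A small sketch (assumes RAPIDJSON_HAS_STDSTRING is defined and \c doc is an existing Document; names are hypothetical):
+        \code
+        std::string tmp = "short lived";
+        Value v;
+        v.SetString(tmp, doc.GetAllocator());   // copies, so v stays valid after tmp is destroyed
+        \endcode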
+ \return The value itself for fluent API. + \post IsString() == true && GetString() != s.data() && strcmp(GetString(),s.data() == 0 && GetStringLength() == s.size() + \note Requires the definition of the preprocessor symbol \ref RAPIDJSON_HAS_STDSTRING. + */ + GenericValue& SetString(const std::basic_string& s, Allocator& allocator) { return SetString(s.data(), SizeType(s.size()), allocator); } +#endif + + //@} + + //!@name Array + //@{ + + //! Templated version for checking whether this value is type T. + /*! + \tparam T Either \c bool, \c int, \c unsigned, \c int64_t, \c uint64_t, \c double, \c float, \c const \c char*, \c std::basic_string + */ + template + bool Is() const { return internal::TypeHelper::Is(*this); } + + template + T Get() const { return internal::TypeHelper::Get(*this); } + + template + T Get() { return internal::TypeHelper::Get(*this); } + + template + ValueType& Set(const T& data) { return internal::TypeHelper::Set(*this, data); } + + template + ValueType& Set(const T& data, AllocatorType& allocator) { return internal::TypeHelper::Set(*this, data, allocator); } + + //@} + + //! Generate events of this value to a Handler. + /*! This function adopts the GoF visitor pattern. + Typical usage is to output this JSON value as JSON text via Writer, which is a Handler. + It can also be used to deep clone this value via GenericDocument, which is also a Handler. + \tparam Handler type of handler. + \param handler An object implementing concept Handler. + */ + template + bool Accept(Handler& handler) const { + switch(GetType()) { + case kNullType: return handler.Null(); + case kFalseType: return handler.Bool(false); + case kTrueType: return handler.Bool(true); + + case kObjectType: + if (RAPIDJSON_UNLIKELY(!handler.StartObject())) + return false; + for (ConstMemberIterator m = MemberBegin(); m != MemberEnd(); ++m) { + RAPIDJSON_ASSERT(m->name.IsString()); // User may change the type of name by MemberIterator. + if (RAPIDJSON_UNLIKELY(!handler.Key(m->name.GetString(), m->name.GetStringLength(), (m->name.data_.f.flags & kCopyFlag) != 0))) + return false; + if (RAPIDJSON_UNLIKELY(!m->value.Accept(handler))) + return false; + } + return handler.EndObject(data_.o.size); + + case kArrayType: + if (RAPIDJSON_UNLIKELY(!handler.StartArray())) + return false; + for (const GenericValue* v = Begin(); v != End(); ++v) + if (RAPIDJSON_UNLIKELY(!v->Accept(handler))) + return false; + return handler.EndArray(data_.a.size); + + case kStringType: + return handler.String(GetString(), GetStringLength(), (data_.f.flags & kCopyFlag) != 0); + + default: + RAPIDJSON_ASSERT(GetType() == kNumberType); + if (IsDouble()) return handler.Double(data_.n.d); + else if (IsInt()) return handler.Int(data_.n.i.i); + else if (IsUint()) return handler.Uint(data_.n.u.u); + else if (IsInt64()) return handler.Int64(data_.n.i64); + else return handler.Uint64(data_.n.u64); + } + } + +private: + template friend class GenericValue; + template friend class GenericDocument; + + enum { + kBoolFlag = 0x0008, + kNumberFlag = 0x0010, + kIntFlag = 0x0020, + kUintFlag = 0x0040, + kInt64Flag = 0x0080, + kUint64Flag = 0x0100, + kDoubleFlag = 0x0200, + kStringFlag = 0x0400, + kCopyFlag = 0x0800, + kInlineStrFlag = 0x1000, + + // Initial flags of different types. 
+ kNullFlag = kNullType, + kTrueFlag = kTrueType | kBoolFlag, + kFalseFlag = kFalseType | kBoolFlag, + kNumberIntFlag = kNumberType | kNumberFlag | kIntFlag | kInt64Flag, + kNumberUintFlag = kNumberType | kNumberFlag | kUintFlag | kUint64Flag | kInt64Flag, + kNumberInt64Flag = kNumberType | kNumberFlag | kInt64Flag, + kNumberUint64Flag = kNumberType | kNumberFlag | kUint64Flag, + kNumberDoubleFlag = kNumberType | kNumberFlag | kDoubleFlag, + kNumberAnyFlag = kNumberType | kNumberFlag | kIntFlag | kInt64Flag | kUintFlag | kUint64Flag | kDoubleFlag, + kConstStringFlag = kStringType | kStringFlag, + kCopyStringFlag = kStringType | kStringFlag | kCopyFlag, + kShortStringFlag = kStringType | kStringFlag | kCopyFlag | kInlineStrFlag, + kObjectFlag = kObjectType, + kArrayFlag = kArrayType, + + kTypeMask = 0x07 + }; + + static const SizeType kDefaultArrayCapacity = 16; + static const SizeType kDefaultObjectCapacity = 16; + + struct Flag { +#if RAPIDJSON_48BITPOINTER_OPTIMIZATION + char payload[sizeof(SizeType) * 2 + 6]; // 2 x SizeType + lower 48-bit pointer +#elif RAPIDJSON_64BIT + char payload[sizeof(SizeType) * 2 + sizeof(void*) + 6]; // 6 padding bytes +#else + char payload[sizeof(SizeType) * 2 + sizeof(void*) + 2]; // 2 padding bytes +#endif + uint16_t flags; + }; + + struct String { + SizeType length; + SizeType hashcode; //!< reserved + const Ch* str; + }; // 12 bytes in 32-bit mode, 16 bytes in 64-bit mode + + // implementation detail: ShortString can represent zero-terminated strings up to MaxSize chars + // (excluding the terminating zero) and store a value to determine the length of the contained + // string in the last character str[LenPos] by storing "MaxSize - length" there. If the string + // to store has the maximal length of MaxSize then str[LenPos] will be 0 and therefore act as + // the string terminator as well. For getting the string length back from that value just use + // "MaxSize - str[LenPos]". + // This allows to store 13-chars strings in 32-bit mode, 21-chars strings in 64-bit mode, + // 13-chars strings for RAPIDJSON_48BITPOINTER_OPTIMIZATION=1 inline (for `UTF8`-encoded strings). + struct ShortString { + enum { MaxChars = sizeof(static_cast(0)->payload) / sizeof(Ch), MaxSize = MaxChars - 1, LenPos = MaxSize }; + Ch str[MaxChars]; + + inline static bool Usable(SizeType len) { return (MaxSize >= len); } + inline void SetLength(SizeType len) { str[LenPos] = static_cast(MaxSize - len); } + inline SizeType GetLength() const { return static_cast(MaxSize - str[LenPos]); } + }; // at most as many bytes as "String" above => 12 bytes in 32-bit mode, 16 bytes in 64-bit mode + + // By using proper binary layout, retrieval of different integer types do not need conversions. 
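+    // Illustrative note: on a little-endian build, SetInt(-1) stores the sign-extended value in
+    // data_.n.i64 and sets both kIntFlag and kInt64Flag, so GetInt() reads data_.n.i.i and
+    // GetInt64() reads data_.n.i64 from the same 8 bytes without any conversion. The padding
+    // members below only keep i/u overlapping the numerically low half of i64/u64 on either endianness.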
+ union Number { +#if RAPIDJSON_ENDIAN == RAPIDJSON_LITTLEENDIAN + struct I { + int i; + char padding[4]; + }i; + struct U { + unsigned u; + char padding2[4]; + }u; +#else + struct I { + char padding[4]; + int i; + }i; + struct U { + char padding2[4]; + unsigned u; + }u; +#endif + int64_t i64; + uint64_t u64; + double d; + }; // 8 bytes + + struct ObjectData { + SizeType size; + SizeType capacity; + Member* members; + }; // 12 bytes in 32-bit mode, 16 bytes in 64-bit mode + + struct ArrayData { + SizeType size; + SizeType capacity; + GenericValue* elements; + }; // 12 bytes in 32-bit mode, 16 bytes in 64-bit mode + + union Data { + String s; + ShortString ss; + Number n; + ObjectData o; + ArrayData a; + Flag f; + }; // 16 bytes in 32-bit mode, 24 bytes in 64-bit mode, 16 bytes in 64-bit with RAPIDJSON_48BITPOINTER_OPTIMIZATION + + RAPIDJSON_FORCEINLINE const Ch* GetStringPointer() const { return RAPIDJSON_GETPOINTER(Ch, data_.s.str); } + RAPIDJSON_FORCEINLINE const Ch* SetStringPointer(const Ch* str) { return RAPIDJSON_SETPOINTER(Ch, data_.s.str, str); } + RAPIDJSON_FORCEINLINE GenericValue* GetElementsPointer() const { return RAPIDJSON_GETPOINTER(GenericValue, data_.a.elements); } + RAPIDJSON_FORCEINLINE GenericValue* SetElementsPointer(GenericValue* elements) { return RAPIDJSON_SETPOINTER(GenericValue, data_.a.elements, elements); } + RAPIDJSON_FORCEINLINE Member* GetMembersPointer() const { return RAPIDJSON_GETPOINTER(Member, data_.o.members); } + RAPIDJSON_FORCEINLINE Member* SetMembersPointer(Member* members) { return RAPIDJSON_SETPOINTER(Member, data_.o.members, members); } + + // Initialize this value as array with initial data, without calling destructor. + void SetArrayRaw(GenericValue* values, SizeType count, Allocator& allocator) { + data_.f.flags = kArrayFlag; + if (count) { + GenericValue* e = static_cast(allocator.Malloc(count * sizeof(GenericValue))); + SetElementsPointer(e); + std::memcpy(reinterpret_cast(e), reinterpret_cast(values), count * sizeof(GenericValue)); + } + else + SetElementsPointer(0); + data_.a.size = data_.a.capacity = count; + } + + //! Initialize this value as object with initial data, without calling destructor. + void SetObjectRaw(Member* members, SizeType count, Allocator& allocator) { + data_.f.flags = kObjectFlag; + if (count) { + Member* m = static_cast(allocator.Malloc(count * sizeof(Member))); + SetMembersPointer(m); + std::memcpy(reinterpret_cast(m), reinterpret_cast(members), count * sizeof(Member)); + } + else + SetMembersPointer(0); + data_.o.size = data_.o.capacity = count; + } + + //! Initialize this value as constant string, without calling destructor. + void SetStringRaw(StringRefType s) RAPIDJSON_NOEXCEPT { + data_.f.flags = kConstStringFlag; + SetStringPointer(s); + data_.s.length = s.length; + } + + //! Initialize this value as copy string with initial data, without calling destructor. + void SetStringRaw(StringRefType s, Allocator& allocator) { + Ch* str = 0; + if (ShortString::Usable(s.length)) { + data_.f.flags = kShortStringFlag; + data_.ss.SetLength(s.length); + str = data_.ss.str; + } else { + data_.f.flags = kCopyStringFlag; + data_.s.length = s.length; + str = static_cast(allocator.Malloc((s.length + 1) * sizeof(Ch))); + SetStringPointer(str); + } + std::memcpy(str, s, s.length * sizeof(Ch)); + str[s.length] = '\0'; + } + + //! 
Assignment without calling destructor + void RawAssign(GenericValue& rhs) RAPIDJSON_NOEXCEPT { + data_ = rhs.data_; + // data_.f.flags = rhs.data_.f.flags; + rhs.data_.f.flags = kNullFlag; + } + + template + bool StringEqual(const GenericValue& rhs) const { + RAPIDJSON_ASSERT(IsString()); + RAPIDJSON_ASSERT(rhs.IsString()); + + const SizeType len1 = GetStringLength(); + const SizeType len2 = rhs.GetStringLength(); + if(len1 != len2) { return false; } + + const Ch* const str1 = GetString(); + const Ch* const str2 = rhs.GetString(); + if(str1 == str2) { return true; } // fast path for constant string + + return (std::memcmp(str1, str2, sizeof(Ch) * len1) == 0); + } + + Data data_; +}; + +//! GenericValue with UTF8 encoding +typedef GenericValue > Value; + +/////////////////////////////////////////////////////////////////////////////// +// GenericDocument + +//! A document for parsing JSON text as DOM. +/*! + \note implements Handler concept + \tparam Encoding Encoding for both parsing and string storage. + \tparam Allocator Allocator for allocating memory for the DOM + \tparam StackAllocator Allocator for allocating memory for stack during parsing. + \warning Although GenericDocument inherits from GenericValue, the API does \b not provide any virtual functions, especially no virtual destructor. To avoid memory leaks, do not \c delete a GenericDocument object via a pointer to a GenericValue. +*/ +template , typename StackAllocator = CrtAllocator> +class GenericDocument : public GenericValue { +public: + typedef typename Encoding::Ch Ch; //!< Character type derived from Encoding. + typedef GenericValue ValueType; //!< Value type of the document. + typedef Allocator AllocatorType; //!< Allocator type from template parameter. + + //! Constructor + /*! Creates an empty document of specified type. + \param type Mandatory type of object to create. + \param allocator Optional allocator for allocating memory. + \param stackCapacity Optional initial capacity of stack in bytes. + \param stackAllocator Optional allocator for allocating memory for stack. + */ + explicit GenericDocument(Type type, Allocator* allocator = 0, size_t stackCapacity = kDefaultStackCapacity, StackAllocator* stackAllocator = 0) : + GenericValue(type), allocator_(allocator), ownAllocator_(0), stack_(stackAllocator, stackCapacity), parseResult_() + { + if (!allocator_) + ownAllocator_ = allocator_ = RAPIDJSON_NEW(Allocator()); + } + + //! Constructor + /*! Creates an empty document which type is Null. + \param allocator Optional allocator for allocating memory. + \param stackCapacity Optional initial capacity of stack in bytes. + \param stackAllocator Optional allocator for allocating memory for stack. + */ + GenericDocument(Allocator* allocator = 0, size_t stackCapacity = kDefaultStackCapacity, StackAllocator* stackAllocator = 0) : + allocator_(allocator), ownAllocator_(0), stack_(stackAllocator, stackCapacity), parseResult_() + { + if (!allocator_) + ownAllocator_ = allocator_ = RAPIDJSON_NEW(Allocator()); + } + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + //! 
Move constructor in C++11 + GenericDocument(GenericDocument&& rhs) RAPIDJSON_NOEXCEPT + : ValueType(std::forward(rhs)), // explicit cast to avoid prohibited move from Document + allocator_(rhs.allocator_), + ownAllocator_(rhs.ownAllocator_), + stack_(std::move(rhs.stack_)), + parseResult_(rhs.parseResult_) + { + rhs.allocator_ = 0; + rhs.ownAllocator_ = 0; + rhs.parseResult_ = ParseResult(); + } +#endif + + ~GenericDocument() { + Destroy(); + } + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + //! Move assignment in C++11 + GenericDocument& operator=(GenericDocument&& rhs) RAPIDJSON_NOEXCEPT + { + // The cast to ValueType is necessary here, because otherwise it would + // attempt to call GenericValue's templated assignment operator. + ValueType::operator=(std::forward(rhs)); + + // Calling the destructor here would prematurely call stack_'s destructor + Destroy(); + + allocator_ = rhs.allocator_; + ownAllocator_ = rhs.ownAllocator_; + stack_ = std::move(rhs.stack_); + parseResult_ = rhs.parseResult_; + + rhs.allocator_ = 0; + rhs.ownAllocator_ = 0; + rhs.parseResult_ = ParseResult(); + + return *this; + } +#endif + + //! Exchange the contents of this document with those of another. + /*! + \param rhs Another document. + \note Constant complexity. + \see GenericValue::Swap + */ + GenericDocument& Swap(GenericDocument& rhs) RAPIDJSON_NOEXCEPT { + ValueType::Swap(rhs); + stack_.Swap(rhs.stack_); + internal::Swap(allocator_, rhs.allocator_); + internal::Swap(ownAllocator_, rhs.ownAllocator_); + internal::Swap(parseResult_, rhs.parseResult_); + return *this; + } + + //! free-standing swap function helper + /*! + Helper function to enable support for common swap implementation pattern based on \c std::swap: + \code + void swap(MyClass& a, MyClass& b) { + using std::swap; + swap(a.doc, b.doc); + // ... + } + \endcode + \see Swap() + */ + friend inline void swap(GenericDocument& a, GenericDocument& b) RAPIDJSON_NOEXCEPT { a.Swap(b); } + + //! Populate this document by a generator which produces SAX events. + /*! \tparam Generator A functor with bool f(Handler) prototype. + \param g Generator functor which sends SAX events to the parameter. + \return The document itself for fluent API. + */ + template + GenericDocument& Populate(Generator& g) { + ClearStackOnExit scope(*this); + if (g(*this)) { + RAPIDJSON_ASSERT(stack_.GetSize() == sizeof(ValueType)); // Got one and only one root object + ValueType::operator=(*stack_.template Pop(1));// Move value from stack to document + } + return *this; + } + + //!@name Parse from stream + //!@{ + + //! Parse JSON text from an input stream (with Encoding conversion) + /*! \tparam parseFlags Combination of \ref ParseFlag. + \tparam SourceEncoding Encoding of input stream + \tparam InputStream Type of input stream, implementing Stream concept + \param is Input stream to be parsed. + \return The document itself for fluent API. + */ + template + GenericDocument& ParseStream(InputStream& is) { + GenericReader reader( + stack_.HasAllocator() ? &stack_.GetAllocator() : 0); + ClearStackOnExit scope(*this); + parseResult_ = reader.template Parse(is, *this); + if (parseResult_) { + RAPIDJSON_ASSERT(stack_.GetSize() == sizeof(ValueType)); // Got one and only one root object + ValueType::operator=(*stack_.template Pop(1));// Move value from stack to document + } + return *this; + } + + //! Parse JSON text from an input stream + /*! \tparam parseFlags Combination of \ref ParseFlag. 
+ \tparam InputStream Type of input stream, implementing Stream concept + \param is Input stream to be parsed. + \return The document itself for fluent API. + */ + template + GenericDocument& ParseStream(InputStream& is) { + return ParseStream(is); + } + + //! Parse JSON text from an input stream (with \ref kParseDefaultFlags) + /*! \tparam InputStream Type of input stream, implementing Stream concept + \param is Input stream to be parsed. + \return The document itself for fluent API. + */ + template + GenericDocument& ParseStream(InputStream& is) { + return ParseStream(is); + } + //!@} + + //!@name Parse in-place from mutable string + //!@{ + + //! Parse JSON text from a mutable string + /*! \tparam parseFlags Combination of \ref ParseFlag. + \param str Mutable zero-terminated string to be parsed. + \return The document itself for fluent API. + */ + template + GenericDocument& ParseInsitu(Ch* str) { + GenericInsituStringStream s(str); + return ParseStream(s); + } + + //! Parse JSON text from a mutable string (with \ref kParseDefaultFlags) + /*! \param str Mutable zero-terminated string to be parsed. + \return The document itself for fluent API. + */ + GenericDocument& ParseInsitu(Ch* str) { + return ParseInsitu(str); + } + //!@} + + //!@name Parse from read-only string + //!@{ + + //! Parse JSON text from a read-only string (with Encoding conversion) + /*! \tparam parseFlags Combination of \ref ParseFlag (must not contain \ref kParseInsituFlag). + \tparam SourceEncoding Transcoding from input Encoding + \param str Read-only zero-terminated string to be parsed. + */ + template + GenericDocument& Parse(const typename SourceEncoding::Ch* str) { + RAPIDJSON_ASSERT(!(parseFlags & kParseInsituFlag)); + GenericStringStream s(str); + return ParseStream(s); + } + + //! Parse JSON text from a read-only string + /*! \tparam parseFlags Combination of \ref ParseFlag (must not contain \ref kParseInsituFlag). + \param str Read-only zero-terminated string to be parsed. + */ + template + GenericDocument& Parse(const Ch* str) { + return Parse(str); + } + + //! Parse JSON text from a read-only string (with \ref kParseDefaultFlags) + /*! \param str Read-only zero-terminated string to be parsed. + */ + GenericDocument& Parse(const Ch* str) { + return Parse(str); + } + + template + GenericDocument& Parse(const typename SourceEncoding::Ch* str, size_t length) { + RAPIDJSON_ASSERT(!(parseFlags & kParseInsituFlag)); + MemoryStream ms(static_cast(str), length * sizeof(typename SourceEncoding::Ch)); + EncodedInputStream is(ms); + ParseStream(is); + return *this; + } + + template + GenericDocument& Parse(const Ch* str, size_t length) { + return Parse(str, length); + } + + GenericDocument& Parse(const Ch* str, size_t length) { + return Parse(str, length); + } + +#if RAPIDJSON_HAS_STDSTRING + template + GenericDocument& Parse(const std::basic_string& str) { + // c_str() is constant complexity according to standard. Should be faster than Parse(const char*, size_t) + return Parse(str.c_str()); + } + + template + GenericDocument& Parse(const std::basic_string& str) { + return Parse(str.c_str()); + } + + GenericDocument& Parse(const std::basic_string& str) { + return Parse(str); + } +#endif // RAPIDJSON_HAS_STDSTRING + + //!@} + + //!@name Handling parse errors + //!@{ + + //! Whether a parse error has occured in the last parsing. + bool HasParseError() const { return parseResult_.IsError(); } + + //! Get the \ref ParseErrorCode of last parsing. 
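+    //! A short sketch of the usual error check (illustrative; the truncated JSON is hypothetical):
+    //! \code
+    //! Document doc;
+    //! doc.Parse("{\"open\":1");                      // malformed: missing closing brace
+    //! if (doc.HasParseError()) {
+    //!     ParseErrorCode e = doc.GetParseError();    // compare against ParseErrorCode values
+    //!     size_t where = doc.GetErrorOffset();       // byte offset of the error in the input
+    //! }
+    //! \endcode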
+ ParseErrorCode GetParseError() const { return parseResult_.Code(); } + + //! Get the position of last parsing error in input, 0 otherwise. + size_t GetErrorOffset() const { return parseResult_.Offset(); } + + //! Implicit conversion to get the last parse result +#ifndef __clang // -Wdocumentation + /*! \return \ref ParseResult of the last parse operation + + \code + Document doc; + ParseResult ok = doc.Parse(json); + if (!ok) + printf( "JSON parse error: %s (%u)\n", GetParseError_En(ok.Code()), ok.Offset()); + \endcode + */ +#endif + operator ParseResult() const { return parseResult_; } + //!@} + + //! Get the allocator of this document. + Allocator& GetAllocator() { + RAPIDJSON_ASSERT(allocator_); + return *allocator_; + } + + //! Get the capacity of stack in bytes. + size_t GetStackCapacity() const { return stack_.GetCapacity(); } + +private: + // clear stack on any exit from ParseStream, e.g. due to exception + struct ClearStackOnExit { + explicit ClearStackOnExit(GenericDocument& d) : d_(d) {} + ~ClearStackOnExit() { d_.ClearStack(); } + private: + ClearStackOnExit(const ClearStackOnExit&); + ClearStackOnExit& operator=(const ClearStackOnExit&); + GenericDocument& d_; + }; + + // callers of the following private Handler functions + // template friend class GenericReader; // for parsing + template friend class GenericValue; // for deep copying + +public: + // Implementation of Handler + bool Null() { new (stack_.template Push()) ValueType(); return true; } + bool Bool(bool b) { new (stack_.template Push()) ValueType(b); return true; } + bool Int(int i) { new (stack_.template Push()) ValueType(i); return true; } + bool Uint(unsigned i) { new (stack_.template Push()) ValueType(i); return true; } + bool Int64(int64_t i) { new (stack_.template Push()) ValueType(i); return true; } + bool Uint64(uint64_t i) { new (stack_.template Push()) ValueType(i); return true; } + bool Double(double d) { new (stack_.template Push()) ValueType(d); return true; } + + bool RawNumber(const Ch* str, SizeType length, bool copy) { + if (copy) + new (stack_.template Push()) ValueType(str, length, GetAllocator()); + else + new (stack_.template Push()) ValueType(str, length); + return true; + } + + bool String(const Ch* str, SizeType length, bool copy) { + if (copy) + new (stack_.template Push()) ValueType(str, length, GetAllocator()); + else + new (stack_.template Push()) ValueType(str, length); + return true; + } + + bool StartObject() { new (stack_.template Push()) ValueType(kObjectType); return true; } + + bool Key(const Ch* str, SizeType length, bool copy) { return String(str, length, copy); } + + bool EndObject(SizeType memberCount) { + typename ValueType::Member* members = stack_.template Pop(memberCount); + stack_.template Top()->SetObjectRaw(members, memberCount, GetAllocator()); + return true; + } + + bool StartArray() { new (stack_.template Push()) ValueType(kArrayType); return true; } + + bool EndArray(SizeType elementCount) { + ValueType* elements = stack_.template Pop(elementCount); + stack_.template Top()->SetArrayRaw(elements, elementCount, GetAllocator()); + return true; + } + +private: + //! Prohibit copying + GenericDocument(const GenericDocument&); + //! 
Prohibit assignment + GenericDocument& operator=(const GenericDocument&); + + void ClearStack() { + if (Allocator::kNeedFree) + while (stack_.GetSize() > 0) // Here assumes all elements in stack array are GenericValue (Member is actually 2 GenericValue objects) + (stack_.template Pop(1))->~ValueType(); + else + stack_.Clear(); + stack_.ShrinkToFit(); + } + + void Destroy() { + RAPIDJSON_DELETE(ownAllocator_); + } + + static const size_t kDefaultStackCapacity = 1024; + Allocator* allocator_; + Allocator* ownAllocator_; + internal::Stack stack_; + ParseResult parseResult_; +}; + +//! GenericDocument with UTF8 encoding +typedef GenericDocument > Document; + +// defined here due to the dependency on GenericDocument +template +template +inline +GenericValue::GenericValue(const GenericValue& rhs, Allocator& allocator) +{ + switch (rhs.GetType()) { + case kObjectType: + case kArrayType: { // perform deep copy via SAX Handler + GenericDocument d(&allocator); + rhs.Accept(d); + RawAssign(*d.stack_.template Pop(1)); + } + break; + case kStringType: + if (rhs.data_.f.flags == kConstStringFlag) { + data_.f.flags = rhs.data_.f.flags; + data_ = *reinterpret_cast(&rhs.data_); + } else { + SetStringRaw(StringRef(rhs.GetString(), rhs.GetStringLength()), allocator); + } + break; + default: + data_.f.flags = rhs.data_.f.flags; + data_ = *reinterpret_cast(&rhs.data_); + break; + } +} + +//! Helper class for accessing Value of array type. +/*! + Instance of this helper class is obtained by \c GenericValue::GetArray(). + In addition to all APIs for array type, it provides range-based for loop if \c RAPIDJSON_HAS_CXX11_RANGE_FOR=1. +*/ +template +class GenericArray { +public: + typedef GenericArray ConstArray; + typedef GenericArray Array; + typedef ValueT PlainType; + typedef typename internal::MaybeAddConst::Type ValueType; + typedef ValueType* ValueIterator; // This may be const or non-const iterator + typedef const ValueT* ConstValueIterator; + typedef typename ValueType::AllocatorType AllocatorType; + typedef typename ValueType::StringRefType StringRefType; + + template + friend class GenericValue; + + GenericArray(const GenericArray& rhs) : value_(rhs.value_) {} + GenericArray& operator=(const GenericArray& rhs) { value_ = rhs.value_; return *this; } + ~GenericArray() {} + + SizeType Size() const { return value_.Size(); } + SizeType Capacity() const { return value_.Capacity(); } + bool Empty() const { return value_.Empty(); } + void Clear() const { value_.Clear(); } + ValueType& operator[](SizeType index) const { return value_[index]; } + ValueIterator Begin() const { return value_.Begin(); } + ValueIterator End() const { return value_.End(); } + GenericArray Reserve(SizeType newCapacity, AllocatorType &allocator) const { value_.Reserve(newCapacity, allocator); return *this; } + GenericArray PushBack(ValueType& value, AllocatorType& allocator) const { value_.PushBack(value, allocator); return *this; } +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + GenericArray PushBack(ValueType&& value, AllocatorType& allocator) const { value_.PushBack(value, allocator); return *this; } +#endif // RAPIDJSON_HAS_CXX11_RVALUE_REFS + GenericArray PushBack(StringRefType value, AllocatorType& allocator) const { value_.PushBack(value, allocator); return *this; } + template RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (const GenericArray&)) PushBack(T value, AllocatorType& allocator) const { value_.PushBack(value, allocator); return *this; } + GenericArray PopBack() const { value_.PopBack(); return *this; } 
+ ValueIterator Erase(ConstValueIterator pos) const { return value_.Erase(pos); } + ValueIterator Erase(ConstValueIterator first, ConstValueIterator last) const { return value_.Erase(first, last); } + +#if RAPIDJSON_HAS_CXX11_RANGE_FOR + ValueIterator begin() const { return value_.Begin(); } + ValueIterator end() const { return value_.End(); } +#endif + +private: + GenericArray(); + GenericArray(ValueType& value) : value_(value) {} + ValueType& value_; +}; + +//! Helper class for accessing Value of object type. +/*! + Instance of this helper class is obtained by \c GenericValue::GetObject(). + In addition to all APIs for array type, it provides range-based for loop if \c RAPIDJSON_HAS_CXX11_RANGE_FOR=1. +*/ +template +class GenericObject { +public: + typedef GenericObject ConstObject; + typedef GenericObject Object; + typedef ValueT PlainType; + typedef typename internal::MaybeAddConst::Type ValueType; + typedef GenericMemberIterator MemberIterator; // This may be const or non-const iterator + typedef GenericMemberIterator ConstMemberIterator; + typedef typename ValueType::AllocatorType AllocatorType; + typedef typename ValueType::StringRefType StringRefType; + typedef typename ValueType::EncodingType EncodingType; + typedef typename ValueType::Ch Ch; + + template + friend class GenericValue; + + GenericObject(const GenericObject& rhs) : value_(rhs.value_) {} + GenericObject& operator=(const GenericObject& rhs) { value_ = rhs.value_; return *this; } + ~GenericObject() {} + + SizeType MemberCount() const { return value_.MemberCount(); } + bool ObjectEmpty() const { return value_.ObjectEmpty(); } + template ValueType& operator[](T* name) const { return value_[name]; } + template ValueType& operator[](const GenericValue& name) const { return value_[name]; } +#if RAPIDJSON_HAS_STDSTRING + ValueType& operator[](const std::basic_string& name) const { return value_[name]; } +#endif + MemberIterator MemberBegin() const { return value_.MemberBegin(); } + MemberIterator MemberEnd() const { return value_.MemberEnd(); } + bool HasMember(const Ch* name) const { return value_.HasMember(name); } +#if RAPIDJSON_HAS_STDSTRING + bool HasMember(const std::basic_string& name) const { return value_.HasMember(name); } +#endif + template bool HasMember(const GenericValue& name) const { return value_.HasMember(name); } + MemberIterator FindMember(const Ch* name) const { return value_.FindMember(name); } + template MemberIterator FindMember(const GenericValue& name) const { return value_.FindMember(name); } +#if RAPIDJSON_HAS_STDSTRING + MemberIterator FindMember(const std::basic_string& name) const { return value_.FindMember(name); } +#endif + GenericObject AddMember(ValueType& name, ValueType& value, AllocatorType& allocator) const { value_.AddMember(name, value, allocator); return *this; } + GenericObject AddMember(ValueType& name, StringRefType value, AllocatorType& allocator) const { value_.AddMember(name, value, allocator); return *this; } +#if RAPIDJSON_HAS_STDSTRING + GenericObject AddMember(ValueType& name, std::basic_string& value, AllocatorType& allocator) const { value_.AddMember(name, value, allocator); return *this; } +#endif + template RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (ValueType&)) AddMember(ValueType& name, T value, AllocatorType& allocator) const { value_.AddMember(name, value, allocator); return *this; } +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + GenericObject AddMember(ValueType&& name, ValueType&& value, AllocatorType& allocator) const { 
value_.AddMember(name, value, allocator); return *this; } + GenericObject AddMember(ValueType&& name, ValueType& value, AllocatorType& allocator) const { value_.AddMember(name, value, allocator); return *this; } + GenericObject AddMember(ValueType& name, ValueType&& value, AllocatorType& allocator) const { value_.AddMember(name, value, allocator); return *this; } + GenericObject AddMember(StringRefType name, ValueType&& value, AllocatorType& allocator) const { value_.AddMember(name, value, allocator); return *this; } +#endif // RAPIDJSON_HAS_CXX11_RVALUE_REFS + GenericObject AddMember(StringRefType name, ValueType& value, AllocatorType& allocator) const { value_.AddMember(name, value, allocator); return *this; } + GenericObject AddMember(StringRefType name, StringRefType value, AllocatorType& allocator) const { value_.AddMember(name, value, allocator); return *this; } + template RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (GenericObject)) AddMember(StringRefType name, T value, AllocatorType& allocator) const { value_.AddMember(name, value, allocator); return *this; } + void RemoveAllMembers() { return value_.RemoveAllMembers(); } + bool RemoveMember(const Ch* name) const { return value_.RemoveMember(name); } +#if RAPIDJSON_HAS_STDSTRING + bool RemoveMember(const std::basic_string& name) const { return value_.RemoveMember(name); } +#endif + template bool RemoveMember(const GenericValue& name) const { return value_.RemoveMember(name); } + MemberIterator RemoveMember(MemberIterator m) const { return value_.RemoveMember(m); } + MemberIterator EraseMember(ConstMemberIterator pos) const { return value_.EraseMember(pos); } + MemberIterator EraseMember(ConstMemberIterator first, ConstMemberIterator last) const { return value_.EraseMember(first, last); } + bool EraseMember(const Ch* name) const { return value_.EraseMember(name); } +#if RAPIDJSON_HAS_STDSTRING + bool EraseMember(const std::basic_string& name) const { return EraseMember(ValueType(StringRef(name))); } +#endif + template bool EraseMember(const GenericValue& name) const { return value_.EraseMember(name); } + +#if RAPIDJSON_HAS_CXX11_RANGE_FOR + MemberIterator begin() const { return value_.MemberBegin(); } + MemberIterator end() const { return value_.MemberEnd(); } +#endif + +private: + GenericObject(); + GenericObject(ValueType& value) : value_(value) {} + ValueType& value_; +}; + +RAPIDJSON_NAMESPACE_END +RAPIDJSON_DIAG_POP + +#endif // RAPIDJSON_DOCUMENT_H_ diff --git a/include/rapidjson/encodedstream.h b/include/rapidjson/encodedstream.h new file mode 100644 index 0000000..1450683 --- /dev/null +++ b/include/rapidjson/encodedstream.h @@ -0,0 +1,299 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. 
+ +#ifndef RAPIDJSON_ENCODEDSTREAM_H_ +#define RAPIDJSON_ENCODEDSTREAM_H_ + +#include "stream.h" +#include "memorystream.h" + +#ifdef __GNUC__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(effc++) +#endif + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(padded) +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +//! Input byte stream wrapper with a statically bound encoding. +/*! + \tparam Encoding The interpretation of encoding of the stream. Either UTF8, UTF16LE, UTF16BE, UTF32LE, UTF32BE. + \tparam InputByteStream Type of input byte stream. For example, FileReadStream. +*/ +template +class EncodedInputStream { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); +public: + typedef typename Encoding::Ch Ch; + + EncodedInputStream(InputByteStream& is) : is_(is) { + current_ = Encoding::TakeBOM(is_); + } + + Ch Peek() const { return current_; } + Ch Take() { Ch c = current_; current_ = Encoding::Take(is_); return c; } + size_t Tell() const { return is_.Tell(); } + + // Not implemented + void Put(Ch) { RAPIDJSON_ASSERT(false); } + void Flush() { RAPIDJSON_ASSERT(false); } + Ch* PutBegin() { RAPIDJSON_ASSERT(false); return 0; } + size_t PutEnd(Ch*) { RAPIDJSON_ASSERT(false); return 0; } + +private: + EncodedInputStream(const EncodedInputStream&); + EncodedInputStream& operator=(const EncodedInputStream&); + + InputByteStream& is_; + Ch current_; +}; + +//! Specialized for UTF8 MemoryStream. +template <> +class EncodedInputStream, MemoryStream> { +public: + typedef UTF8<>::Ch Ch; + + EncodedInputStream(MemoryStream& is) : is_(is) { + if (static_cast(is_.Peek()) == 0xEFu) is_.Take(); + if (static_cast(is_.Peek()) == 0xBBu) is_.Take(); + if (static_cast(is_.Peek()) == 0xBFu) is_.Take(); + } + Ch Peek() const { return is_.Peek(); } + Ch Take() { return is_.Take(); } + size_t Tell() const { return is_.Tell(); } + + // Not implemented + void Put(Ch) {} + void Flush() {} + Ch* PutBegin() { return 0; } + size_t PutEnd(Ch*) { return 0; } + + MemoryStream& is_; + +private: + EncodedInputStream(const EncodedInputStream&); + EncodedInputStream& operator=(const EncodedInputStream&); +}; + +//! Output byte stream wrapper with statically bound encoding. +/*! + \tparam Encoding The interpretation of encoding of the stream. Either UTF8, UTF16LE, UTF16BE, UTF32LE, UTF32BE. + \tparam OutputByteStream Type of input byte stream. For example, FileWriteStream. +*/ +template +class EncodedOutputStream { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); +public: + typedef typename Encoding::Ch Ch; + + EncodedOutputStream(OutputByteStream& os, bool putBOM = true) : os_(os) { + if (putBOM) + Encoding::PutBOM(os_); + } + + void Put(Ch c) { Encoding::Put(os_, c); } + void Flush() { os_.Flush(); } + + // Not implemented + Ch Peek() const { RAPIDJSON_ASSERT(false); return 0;} + Ch Take() { RAPIDJSON_ASSERT(false); return 0;} + size_t Tell() const { RAPIDJSON_ASSERT(false); return 0; } + Ch* PutBegin() { RAPIDJSON_ASSERT(false); return 0; } + size_t PutEnd(Ch*) { RAPIDJSON_ASSERT(false); return 0; } + +private: + EncodedOutputStream(const EncodedOutputStream&); + EncodedOutputStream& operator=(const EncodedOutputStream&); + + OutputByteStream& os_; +}; + +#define RAPIDJSON_ENCODINGS_FUNC(x) UTF8::x, UTF16LE::x, UTF16BE::x, UTF32LE::x, UTF32BE::x + +//! Input stream wrapper with dynamically bound encoding and automatic encoding detection. +/*! + \tparam CharType Type of character for reading. + \tparam InputByteStream type of input byte stream to be wrapped. 
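+    A usage sketch (illustrative; assumes FileReadStream from filereadstream.h, Document from
+    document.h, and the dynamically typed AutoUTF encoding from encodings.h; "data.json" is hypothetical):
+    \code
+    FILE* fp = fopen("data.json", "rb");                      // error handling omitted
+    char buffer[256];
+    FileReadStream bis(fp, buffer, sizeof(buffer));
+    AutoUTFInputStream<unsigned, FileReadStream> eis(bis);    // detects BOM or null pattern, defaults to UTF-8
+    Document d;
+    d.ParseStream<0, AutoUTF<unsigned> >(eis);                // parse with the detected encoding
+    fclose(fp);
+    \endcode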
+*/ +template +class AutoUTFInputStream { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); +public: + typedef CharType Ch; + + //! Constructor. + /*! + \param is input stream to be wrapped. + \param type UTF encoding type if it is not detected from the stream. + */ + AutoUTFInputStream(InputByteStream& is, UTFType type = kUTF8) : is_(&is), type_(type), hasBOM_(false) { + RAPIDJSON_ASSERT(type >= kUTF8 && type <= kUTF32BE); + DetectType(); + static const TakeFunc f[] = { RAPIDJSON_ENCODINGS_FUNC(Take) }; + takeFunc_ = f[type_]; + current_ = takeFunc_(*is_); + } + + UTFType GetType() const { return type_; } + bool HasBOM() const { return hasBOM_; } + + Ch Peek() const { return current_; } + Ch Take() { Ch c = current_; current_ = takeFunc_(*is_); return c; } + size_t Tell() const { return is_->Tell(); } + + // Not implemented + void Put(Ch) { RAPIDJSON_ASSERT(false); } + void Flush() { RAPIDJSON_ASSERT(false); } + Ch* PutBegin() { RAPIDJSON_ASSERT(false); return 0; } + size_t PutEnd(Ch*) { RAPIDJSON_ASSERT(false); return 0; } + +private: + AutoUTFInputStream(const AutoUTFInputStream&); + AutoUTFInputStream& operator=(const AutoUTFInputStream&); + + // Detect encoding type with BOM or RFC 4627 + void DetectType() { + // BOM (Byte Order Mark): + // 00 00 FE FF UTF-32BE + // FF FE 00 00 UTF-32LE + // FE FF UTF-16BE + // FF FE UTF-16LE + // EF BB BF UTF-8 + + const unsigned char* c = reinterpret_cast(is_->Peek4()); + if (!c) + return; + + unsigned bom = static_cast(c[0] | (c[1] << 8) | (c[2] << 16) | (c[3] << 24)); + hasBOM_ = false; + if (bom == 0xFFFE0000) { type_ = kUTF32BE; hasBOM_ = true; is_->Take(); is_->Take(); is_->Take(); is_->Take(); } + else if (bom == 0x0000FEFF) { type_ = kUTF32LE; hasBOM_ = true; is_->Take(); is_->Take(); is_->Take(); is_->Take(); } + else if ((bom & 0xFFFF) == 0xFFFE) { type_ = kUTF16BE; hasBOM_ = true; is_->Take(); is_->Take(); } + else if ((bom & 0xFFFF) == 0xFEFF) { type_ = kUTF16LE; hasBOM_ = true; is_->Take(); is_->Take(); } + else if ((bom & 0xFFFFFF) == 0xBFBBEF) { type_ = kUTF8; hasBOM_ = true; is_->Take(); is_->Take(); is_->Take(); } + + // RFC 4627: Section 3 + // "Since the first two characters of a JSON text will always be ASCII + // characters [RFC0020], it is possible to determine whether an octet + // stream is UTF-8, UTF-16 (BE or LE), or UTF-32 (BE or LE) by looking + // at the pattern of nulls in the first four octets." + // 00 00 00 xx UTF-32BE + // 00 xx 00 xx UTF-16BE + // xx 00 00 00 UTF-32LE + // xx 00 xx 00 UTF-16LE + // xx xx xx xx UTF-8 + + if (!hasBOM_) { + unsigned pattern = (c[0] ? 1 : 0) | (c[1] ? 2 : 0) | (c[2] ? 4 : 0) | (c[3] ? 8 : 0); + switch (pattern) { + case 0x08: type_ = kUTF32BE; break; + case 0x0A: type_ = kUTF16BE; break; + case 0x01: type_ = kUTF32LE; break; + case 0x05: type_ = kUTF16LE; break; + case 0x0F: type_ = kUTF8; break; + default: break; // Use type defined by user. + } + } + + // Runtime check whether the size of character type is sufficient. It only perform checks with assertion. + if (type_ == kUTF16LE || type_ == kUTF16BE) RAPIDJSON_ASSERT(sizeof(Ch) >= 2); + if (type_ == kUTF32LE || type_ == kUTF32BE) RAPIDJSON_ASSERT(sizeof(Ch) >= 4); + } + + typedef Ch (*TakeFunc)(InputByteStream& is); + InputByteStream* is_; + UTFType type_; + Ch current_; + TakeFunc takeFunc_; + bool hasBOM_; +}; + +//! Output stream wrapper with dynamically bound encoding and automatic encoding detection. +/*! + \tparam CharType Type of character for writing. 
+ \tparam OutputByteStream type of output byte stream to be wrapped. +*/ +template +class AutoUTFOutputStream { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); +public: + typedef CharType Ch; + + //! Constructor. + /*! + \param os output stream to be wrapped. + \param type UTF encoding type. + \param putBOM Whether to write BOM at the beginning of the stream. + */ + AutoUTFOutputStream(OutputByteStream& os, UTFType type, bool putBOM) : os_(&os), type_(type) { + RAPIDJSON_ASSERT(type >= kUTF8 && type <= kUTF32BE); + + // Runtime check whether the size of character type is sufficient. It only perform checks with assertion. + if (type_ == kUTF16LE || type_ == kUTF16BE) RAPIDJSON_ASSERT(sizeof(Ch) >= 2); + if (type_ == kUTF32LE || type_ == kUTF32BE) RAPIDJSON_ASSERT(sizeof(Ch) >= 4); + + static const PutFunc f[] = { RAPIDJSON_ENCODINGS_FUNC(Put) }; + putFunc_ = f[type_]; + + if (putBOM) + PutBOM(); + } + + UTFType GetType() const { return type_; } + + void Put(Ch c) { putFunc_(*os_, c); } + void Flush() { os_->Flush(); } + + // Not implemented + Ch Peek() const { RAPIDJSON_ASSERT(false); return 0;} + Ch Take() { RAPIDJSON_ASSERT(false); return 0;} + size_t Tell() const { RAPIDJSON_ASSERT(false); return 0; } + Ch* PutBegin() { RAPIDJSON_ASSERT(false); return 0; } + size_t PutEnd(Ch*) { RAPIDJSON_ASSERT(false); return 0; } + +private: + AutoUTFOutputStream(const AutoUTFOutputStream&); + AutoUTFOutputStream& operator=(const AutoUTFOutputStream&); + + void PutBOM() { + typedef void (*PutBOMFunc)(OutputByteStream&); + static const PutBOMFunc f[] = { RAPIDJSON_ENCODINGS_FUNC(PutBOM) }; + f[type_](*os_); + } + + typedef void (*PutFunc)(OutputByteStream&, Ch); + + OutputByteStream* os_; + UTFType type_; + PutFunc putFunc_; +}; + +#undef RAPIDJSON_ENCODINGS_FUNC + +RAPIDJSON_NAMESPACE_END + +#ifdef __clang__ +RAPIDJSON_DIAG_POP +#endif + +#ifdef __GNUC__ +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_FILESTREAM_H_ diff --git a/include/rapidjson/encodings.h b/include/rapidjson/encodings.h new file mode 100644 index 0000000..baa7c2b --- /dev/null +++ b/include/rapidjson/encodings.h @@ -0,0 +1,716 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_ENCODINGS_H_ +#define RAPIDJSON_ENCODINGS_H_ + +#include "rapidjson.h" + +#ifdef _MSC_VER +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(4244) // conversion from 'type1' to 'type2', possible loss of data +RAPIDJSON_DIAG_OFF(4702) // unreachable code +#elif defined(__GNUC__) +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(effc++) +RAPIDJSON_DIAG_OFF(overflow) +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +/////////////////////////////////////////////////////////////////////////////// +// Encoding + +/*! \class rapidjson::Encoding + \brief Concept for encoding of Unicode characters. + +\code +concept Encoding { + typename Ch; //! Type of character. 
A "character" is actually a code unit in unicode's definition. + + enum { supportUnicode = 1 }; // or 0 if not supporting unicode + + //! \brief Encode a Unicode codepoint to an output stream. + //! \param os Output stream. + //! \param codepoint An unicode codepoint, ranging from 0x0 to 0x10FFFF inclusively. + template + static void Encode(OutputStream& os, unsigned codepoint); + + //! \brief Decode a Unicode codepoint from an input stream. + //! \param is Input stream. + //! \param codepoint Output of the unicode codepoint. + //! \return true if a valid codepoint can be decoded from the stream. + template + static bool Decode(InputStream& is, unsigned* codepoint); + + //! \brief Validate one Unicode codepoint from an encoded stream. + //! \param is Input stream to obtain codepoint. + //! \param os Output for copying one codepoint. + //! \return true if it is valid. + //! \note This function just validating and copying the codepoint without actually decode it. + template + static bool Validate(InputStream& is, OutputStream& os); + + // The following functions are deal with byte streams. + + //! Take a character from input byte stream, skip BOM if exist. + template + static CharType TakeBOM(InputByteStream& is); + + //! Take a character from input byte stream. + template + static Ch Take(InputByteStream& is); + + //! Put BOM to output byte stream. + template + static void PutBOM(OutputByteStream& os); + + //! Put a character to output byte stream. + template + static void Put(OutputByteStream& os, Ch c); +}; +\endcode +*/ + +/////////////////////////////////////////////////////////////////////////////// +// UTF8 + +//! UTF-8 encoding. +/*! http://en.wikipedia.org/wiki/UTF-8 + http://tools.ietf.org/html/rfc3629 + \tparam CharType Code unit for storing 8-bit UTF-8 data. Default is char. 
+ \note implements Encoding concept +*/ +template +struct UTF8 { + typedef CharType Ch; + + enum { supportUnicode = 1 }; + + template + static void Encode(OutputStream& os, unsigned codepoint) { + if (codepoint <= 0x7F) + os.Put(static_cast(codepoint & 0xFF)); + else if (codepoint <= 0x7FF) { + os.Put(static_cast(0xC0 | ((codepoint >> 6) & 0xFF))); + os.Put(static_cast(0x80 | ((codepoint & 0x3F)))); + } + else if (codepoint <= 0xFFFF) { + os.Put(static_cast(0xE0 | ((codepoint >> 12) & 0xFF))); + os.Put(static_cast(0x80 | ((codepoint >> 6) & 0x3F))); + os.Put(static_cast(0x80 | (codepoint & 0x3F))); + } + else { + RAPIDJSON_ASSERT(codepoint <= 0x10FFFF); + os.Put(static_cast(0xF0 | ((codepoint >> 18) & 0xFF))); + os.Put(static_cast(0x80 | ((codepoint >> 12) & 0x3F))); + os.Put(static_cast(0x80 | ((codepoint >> 6) & 0x3F))); + os.Put(static_cast(0x80 | (codepoint & 0x3F))); + } + } + + template + static void EncodeUnsafe(OutputStream& os, unsigned codepoint) { + if (codepoint <= 0x7F) + PutUnsafe(os, static_cast(codepoint & 0xFF)); + else if (codepoint <= 0x7FF) { + PutUnsafe(os, static_cast(0xC0 | ((codepoint >> 6) & 0xFF))); + PutUnsafe(os, static_cast(0x80 | ((codepoint & 0x3F)))); + } + else if (codepoint <= 0xFFFF) { + PutUnsafe(os, static_cast(0xE0 | ((codepoint >> 12) & 0xFF))); + PutUnsafe(os, static_cast(0x80 | ((codepoint >> 6) & 0x3F))); + PutUnsafe(os, static_cast(0x80 | (codepoint & 0x3F))); + } + else { + RAPIDJSON_ASSERT(codepoint <= 0x10FFFF); + PutUnsafe(os, static_cast(0xF0 | ((codepoint >> 18) & 0xFF))); + PutUnsafe(os, static_cast(0x80 | ((codepoint >> 12) & 0x3F))); + PutUnsafe(os, static_cast(0x80 | ((codepoint >> 6) & 0x3F))); + PutUnsafe(os, static_cast(0x80 | (codepoint & 0x3F))); + } + } + + template + static bool Decode(InputStream& is, unsigned* codepoint) { +#define COPY() c = is.Take(); *codepoint = (*codepoint << 6) | (static_cast(c) & 0x3Fu) +#define TRANS(mask) result &= ((GetRange(static_cast(c)) & mask) != 0) +#define TAIL() COPY(); TRANS(0x70) + typename InputStream::Ch c = is.Take(); + if (!(c & 0x80)) { + *codepoint = static_cast(c); + return true; + } + + unsigned char type = GetRange(static_cast(c)); + if (type >= 32) { + *codepoint = 0; + } else { + *codepoint = (0xFF >> type) & static_cast(c); + } + bool result = true; + switch (type) { + case 2: TAIL(); return result; + case 3: TAIL(); TAIL(); return result; + case 4: COPY(); TRANS(0x50); TAIL(); return result; + case 5: COPY(); TRANS(0x10); TAIL(); TAIL(); return result; + case 6: TAIL(); TAIL(); TAIL(); return result; + case 10: COPY(); TRANS(0x20); TAIL(); return result; + case 11: COPY(); TRANS(0x60); TAIL(); TAIL(); return result; + default: return false; + } +#undef COPY +#undef TRANS +#undef TAIL + } + + template + static bool Validate(InputStream& is, OutputStream& os) { +#define COPY() os.Put(c = is.Take()) +#define TRANS(mask) result &= ((GetRange(static_cast(c)) & mask) != 0) +#define TAIL() COPY(); TRANS(0x70) + Ch c; + COPY(); + if (!(c & 0x80)) + return true; + + bool result = true; + switch (GetRange(static_cast(c))) { + case 2: TAIL(); return result; + case 3: TAIL(); TAIL(); return result; + case 4: COPY(); TRANS(0x50); TAIL(); return result; + case 5: COPY(); TRANS(0x10); TAIL(); TAIL(); return result; + case 6: TAIL(); TAIL(); TAIL(); return result; + case 10: COPY(); TRANS(0x20); TAIL(); return result; + case 11: COPY(); TRANS(0x60); TAIL(); TAIL(); return result; + default: return false; + } +#undef COPY +#undef TRANS +#undef TAIL + } + + static unsigned char GetRange(unsigned 
char c) { + // Referring to DFA of http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ + // With new mapping 1 -> 0x10, 7 -> 0x20, 9 -> 0x40, such that AND operation can test multiple types. + static const unsigned char type[] = { + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10, + 0x40,0x40,0x40,0x40,0x40,0x40,0x40,0x40,0x40,0x40,0x40,0x40,0x40,0x40,0x40,0x40, + 0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20, + 0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20, + 8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, + 10,3,3,3,3,3,3,3,3,3,3,3,3,4,3,3, 11,6,6,6,5,8,8,8,8,8,8,8,8,8,8,8, + }; + return type[c]; + } + + template + static CharType TakeBOM(InputByteStream& is) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); + typename InputByteStream::Ch c = Take(is); + if (static_cast(c) != 0xEFu) return c; + c = is.Take(); + if (static_cast(c) != 0xBBu) return c; + c = is.Take(); + if (static_cast(c) != 0xBFu) return c; + c = is.Take(); + return c; + } + + template + static Ch Take(InputByteStream& is) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); + return static_cast(is.Take()); + } + + template + static void PutBOM(OutputByteStream& os) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); + os.Put(static_cast(0xEFu)); + os.Put(static_cast(0xBBu)); + os.Put(static_cast(0xBFu)); + } + + template + static void Put(OutputByteStream& os, Ch c) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); + os.Put(static_cast(c)); + } +}; + +/////////////////////////////////////////////////////////////////////////////// +// UTF16 + +//! UTF-16 encoding. +/*! http://en.wikipedia.org/wiki/UTF-16 + http://tools.ietf.org/html/rfc2781 + \tparam CharType Type for storing 16-bit UTF-16 data. Default is wchar_t. C++11 may use char16_t instead. + \note implements Encoding concept + + \note For in-memory access, no need to concern endianness. The code units and code points are represented by CPU's endianness. + For streaming, use UTF16LE and UTF16BE, which handle endianness. 
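
For reference, a minimal sketch (not part of this commit) of how the UTF8<> routines above are driven through the library's stream concept; it assumes the rest of the vendored RapidJSON headers (stringbuffer.h, stream.h) sit alongside encodings.h:

    #include <cassert>
    #include <rapidjson/stringbuffer.h>   // StringBuffer (output stream)
    #include <rapidjson/stream.h>         // StringStream (input stream)
    #include <rapidjson/encodings.h>

    int main() {
        using namespace rapidjson;

        // Encode U+20AC (EURO SIGN): falls into the three-byte branch of UTF8<>::Encode.
        StringBuffer sb;
        UTF8<>::Encode(sb, 0x20ACu);
        assert(sb.GetSize() == 3);        // 0xE2 0x82 0xAC

        // Decode it back from a read-only string stream.
        StringStream ss(sb.GetString());
        unsigned codepoint = 0;
        bool ok = UTF8<>::Decode(ss, &codepoint);
        assert(ok && codepoint == 0x20ACu);
        return 0;
    }
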
+*/ +template +struct UTF16 { + typedef CharType Ch; + RAPIDJSON_STATIC_ASSERT(sizeof(Ch) >= 2); + + enum { supportUnicode = 1 }; + + template + static void Encode(OutputStream& os, unsigned codepoint) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputStream::Ch) >= 2); + if (codepoint <= 0xFFFF) { + RAPIDJSON_ASSERT(codepoint < 0xD800 || codepoint > 0xDFFF); // Code point itself cannot be surrogate pair + os.Put(static_cast(codepoint)); + } + else { + RAPIDJSON_ASSERT(codepoint <= 0x10FFFF); + unsigned v = codepoint - 0x10000; + os.Put(static_cast((v >> 10) | 0xD800)); + os.Put((v & 0x3FF) | 0xDC00); + } + } + + + template + static void EncodeUnsafe(OutputStream& os, unsigned codepoint) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputStream::Ch) >= 2); + if (codepoint <= 0xFFFF) { + RAPIDJSON_ASSERT(codepoint < 0xD800 || codepoint > 0xDFFF); // Code point itself cannot be surrogate pair + PutUnsafe(os, static_cast(codepoint)); + } + else { + RAPIDJSON_ASSERT(codepoint <= 0x10FFFF); + unsigned v = codepoint - 0x10000; + PutUnsafe(os, static_cast((v >> 10) | 0xD800)); + PutUnsafe(os, (v & 0x3FF) | 0xDC00); + } + } + + template + static bool Decode(InputStream& is, unsigned* codepoint) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputStream::Ch) >= 2); + typename InputStream::Ch c = is.Take(); + if (c < 0xD800 || c > 0xDFFF) { + *codepoint = static_cast(c); + return true; + } + else if (c <= 0xDBFF) { + *codepoint = (static_cast(c) & 0x3FF) << 10; + c = is.Take(); + *codepoint |= (static_cast(c) & 0x3FF); + *codepoint += 0x10000; + return c >= 0xDC00 && c <= 0xDFFF; + } + return false; + } + + template + static bool Validate(InputStream& is, OutputStream& os) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputStream::Ch) >= 2); + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputStream::Ch) >= 2); + typename InputStream::Ch c; + os.Put(static_cast(c = is.Take())); + if (c < 0xD800 || c > 0xDFFF) + return true; + else if (c <= 0xDBFF) { + os.Put(c = is.Take()); + return c >= 0xDC00 && c <= 0xDFFF; + } + return false; + } +}; + +//! UTF-16 little endian encoding. +template +struct UTF16LE : UTF16 { + template + static CharType TakeBOM(InputByteStream& is) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); + CharType c = Take(is); + return static_cast(c) == 0xFEFFu ? Take(is) : c; + } + + template + static CharType Take(InputByteStream& is) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); + unsigned c = static_cast(is.Take()); + c |= static_cast(static_cast(is.Take())) << 8; + return static_cast(c); + } + + template + static void PutBOM(OutputByteStream& os) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); + os.Put(static_cast(0xFFu)); + os.Put(static_cast(0xFEu)); + } + + template + static void Put(OutputByteStream& os, CharType c) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); + os.Put(static_cast(static_cast(c) & 0xFFu)); + os.Put(static_cast((static_cast(c) >> 8) & 0xFFu)); + } +}; + +//! UTF-16 big endian encoding. +template +struct UTF16BE : UTF16 { + template + static CharType TakeBOM(InputByteStream& is) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); + CharType c = Take(is); + return static_cast(c) == 0xFEFFu ? 
Take(is) : c; + } + + template + static CharType Take(InputByteStream& is) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); + unsigned c = static_cast(static_cast(is.Take())) << 8; + c |= static_cast(is.Take()); + return static_cast(c); + } + + template + static void PutBOM(OutputByteStream& os) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); + os.Put(static_cast(0xFEu)); + os.Put(static_cast(0xFFu)); + } + + template + static void Put(OutputByteStream& os, CharType c) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); + os.Put(static_cast((static_cast(c) >> 8) & 0xFFu)); + os.Put(static_cast(static_cast(c) & 0xFFu)); + } +}; + +/////////////////////////////////////////////////////////////////////////////// +// UTF32 + +//! UTF-32 encoding. +/*! http://en.wikipedia.org/wiki/UTF-32 + \tparam CharType Type for storing 32-bit UTF-32 data. Default is unsigned. C++11 may use char32_t instead. + \note implements Encoding concept + + \note For in-memory access, no need to concern endianness. The code units and code points are represented by CPU's endianness. + For streaming, use UTF32LE and UTF32BE, which handle endianness. +*/ +template +struct UTF32 { + typedef CharType Ch; + RAPIDJSON_STATIC_ASSERT(sizeof(Ch) >= 4); + + enum { supportUnicode = 1 }; + + template + static void Encode(OutputStream& os, unsigned codepoint) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputStream::Ch) >= 4); + RAPIDJSON_ASSERT(codepoint <= 0x10FFFF); + os.Put(codepoint); + } + + template + static void EncodeUnsafe(OutputStream& os, unsigned codepoint) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputStream::Ch) >= 4); + RAPIDJSON_ASSERT(codepoint <= 0x10FFFF); + PutUnsafe(os, codepoint); + } + + template + static bool Decode(InputStream& is, unsigned* codepoint) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputStream::Ch) >= 4); + Ch c = is.Take(); + *codepoint = c; + return c <= 0x10FFFF; + } + + template + static bool Validate(InputStream& is, OutputStream& os) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputStream::Ch) >= 4); + Ch c; + os.Put(c = is.Take()); + return c <= 0x10FFFF; + } +}; + +//! UTF-32 little endian enocoding. +template +struct UTF32LE : UTF32 { + template + static CharType TakeBOM(InputByteStream& is) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); + CharType c = Take(is); + return static_cast(c) == 0x0000FEFFu ? Take(is) : c; + } + + template + static CharType Take(InputByteStream& is) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); + unsigned c = static_cast(is.Take()); + c |= static_cast(static_cast(is.Take())) << 8; + c |= static_cast(static_cast(is.Take())) << 16; + c |= static_cast(static_cast(is.Take())) << 24; + return static_cast(c); + } + + template + static void PutBOM(OutputByteStream& os) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); + os.Put(static_cast(0xFFu)); + os.Put(static_cast(0xFEu)); + os.Put(static_cast(0x00u)); + os.Put(static_cast(0x00u)); + } + + template + static void Put(OutputByteStream& os, CharType c) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); + os.Put(static_cast(c & 0xFFu)); + os.Put(static_cast((c >> 8) & 0xFFu)); + os.Put(static_cast((c >> 16) & 0xFFu)); + os.Put(static_cast((c >> 24) & 0xFFu)); + } +}; + +//! UTF-32 big endian encoding. 
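
As an illustrative aside (not in the commit), the surrogate-pair branch of UTF16<>::Encode above can be exercised directly; the sketch assumes stringbuffer.h from the same vendored tree:

    #include <cassert>
    #include <rapidjson/stringbuffer.h>   // GenericStringBuffer
    #include <rapidjson/encodings.h>

    int main() {
        using namespace rapidjson;

        GenericStringBuffer<UTF16<> > out;      // code units are wchar_t by default
        UTF16<>::Encode(out, 0x1F600u);         // U+1F600 lies outside the BMP

        // 0x1F600 - 0x10000 = 0xF600 is split across a 0xD800/0xDC00 surrogate pair.
        assert(out.GetSize() / sizeof(UTF16<>::Ch) == 2);
        const UTF16<>::Ch* u = out.GetString();
        assert(u[0] == 0xD83D && u[1] == 0xDE00);
        return 0;
    }
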
+template +struct UTF32BE : UTF32 { + template + static CharType TakeBOM(InputByteStream& is) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); + CharType c = Take(is); + return static_cast(c) == 0x0000FEFFu ? Take(is) : c; + } + + template + static CharType Take(InputByteStream& is) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); + unsigned c = static_cast(static_cast(is.Take())) << 24; + c |= static_cast(static_cast(is.Take())) << 16; + c |= static_cast(static_cast(is.Take())) << 8; + c |= static_cast(static_cast(is.Take())); + return static_cast(c); + } + + template + static void PutBOM(OutputByteStream& os) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); + os.Put(static_cast(0x00u)); + os.Put(static_cast(0x00u)); + os.Put(static_cast(0xFEu)); + os.Put(static_cast(0xFFu)); + } + + template + static void Put(OutputByteStream& os, CharType c) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); + os.Put(static_cast((c >> 24) & 0xFFu)); + os.Put(static_cast((c >> 16) & 0xFFu)); + os.Put(static_cast((c >> 8) & 0xFFu)); + os.Put(static_cast(c & 0xFFu)); + } +}; + +/////////////////////////////////////////////////////////////////////////////// +// ASCII + +//! ASCII encoding. +/*! http://en.wikipedia.org/wiki/ASCII + \tparam CharType Code unit for storing 7-bit ASCII data. Default is char. + \note implements Encoding concept +*/ +template +struct ASCII { + typedef CharType Ch; + + enum { supportUnicode = 0 }; + + template + static void Encode(OutputStream& os, unsigned codepoint) { + RAPIDJSON_ASSERT(codepoint <= 0x7F); + os.Put(static_cast(codepoint & 0xFF)); + } + + template + static void EncodeUnsafe(OutputStream& os, unsigned codepoint) { + RAPIDJSON_ASSERT(codepoint <= 0x7F); + PutUnsafe(os, static_cast(codepoint & 0xFF)); + } + + template + static bool Decode(InputStream& is, unsigned* codepoint) { + uint8_t c = static_cast(is.Take()); + *codepoint = c; + return c <= 0X7F; + } + + template + static bool Validate(InputStream& is, OutputStream& os) { + uint8_t c = static_cast(is.Take()); + os.Put(static_cast(c)); + return c <= 0x7F; + } + + template + static CharType TakeBOM(InputByteStream& is) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); + uint8_t c = static_cast(Take(is)); + return static_cast(c); + } + + template + static Ch Take(InputByteStream& is) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename InputByteStream::Ch) == 1); + return static_cast(is.Take()); + } + + template + static void PutBOM(OutputByteStream& os) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); + (void)os; + } + + template + static void Put(OutputByteStream& os, Ch c) { + RAPIDJSON_STATIC_ASSERT(sizeof(typename OutputByteStream::Ch) == 1); + os.Put(static_cast(c)); + } +}; + +/////////////////////////////////////////////////////////////////////////////// +// AutoUTF + +//! Runtime-specified UTF encoding type of a stream. +enum UTFType { + kUTF8 = 0, //!< UTF-8. + kUTF16LE = 1, //!< UTF-16 little endian. + kUTF16BE = 2, //!< UTF-16 big endian. + kUTF32LE = 3, //!< UTF-32 little endian. + kUTF32BE = 4 //!< UTF-32 big endian. +}; + +//! Dynamically select encoding according to stream's runtime-specified UTF encoding type. +/*! \note This class can be used with AutoUTFInputtStream and AutoUTFOutputStream, which provides GetType(). 
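
A hedged sketch of that pattern (not part of the commit): AutoUTFInputStream and MemoryStream are assumed to come from encodedstream.h and memorystream.h in the same vendored RapidJSON tree, and the BOM sniffing plus per-call dispatch below is what the AutoUTF function tables are for:

    #include <cassert>
    #include <rapidjson/memorystream.h>
    #include <rapidjson/encodedstream.h>

    int main() {
        using namespace rapidjson;

        const char utf16le[] = "\xFF\xFE{\0}\0";            // BOM + "{}" in UTF-16LE
        MemoryStream ms(utf16le, sizeof(utf16le) - 1);
        AutoUTFInputStream<unsigned, MemoryStream> eis(ms); // sniffs the BOM

        assert(eis.HasBOM());
        assert(eis.GetType() == kUTF16LE);

        unsigned codepoint = 0;
        AutoUTF<unsigned>::Decode(eis, &codepoint);         // dispatches to UTF16LE at runtime
        assert(codepoint == '{');
        return 0;
    }
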
+*/ +template +struct AutoUTF { + typedef CharType Ch; + + enum { supportUnicode = 1 }; + +#define RAPIDJSON_ENCODINGS_FUNC(x) UTF8::x, UTF16LE::x, UTF16BE::x, UTF32LE::x, UTF32BE::x + + template + RAPIDJSON_FORCEINLINE static void Encode(OutputStream& os, unsigned codepoint) { + typedef void (*EncodeFunc)(OutputStream&, unsigned); + static const EncodeFunc f[] = { RAPIDJSON_ENCODINGS_FUNC(Encode) }; + (*f[os.GetType()])(os, codepoint); + } + + template + RAPIDJSON_FORCEINLINE static void EncodeUnsafe(OutputStream& os, unsigned codepoint) { + typedef void (*EncodeFunc)(OutputStream&, unsigned); + static const EncodeFunc f[] = { RAPIDJSON_ENCODINGS_FUNC(EncodeUnsafe) }; + (*f[os.GetType()])(os, codepoint); + } + + template + RAPIDJSON_FORCEINLINE static bool Decode(InputStream& is, unsigned* codepoint) { + typedef bool (*DecodeFunc)(InputStream&, unsigned*); + static const DecodeFunc f[] = { RAPIDJSON_ENCODINGS_FUNC(Decode) }; + return (*f[is.GetType()])(is, codepoint); + } + + template + RAPIDJSON_FORCEINLINE static bool Validate(InputStream& is, OutputStream& os) { + typedef bool (*ValidateFunc)(InputStream&, OutputStream&); + static const ValidateFunc f[] = { RAPIDJSON_ENCODINGS_FUNC(Validate) }; + return (*f[is.GetType()])(is, os); + } + +#undef RAPIDJSON_ENCODINGS_FUNC +}; + +/////////////////////////////////////////////////////////////////////////////// +// Transcoder + +//! Encoding conversion. +template +struct Transcoder { + //! Take one Unicode codepoint from source encoding, convert it to target encoding and put it to the output stream. + template + RAPIDJSON_FORCEINLINE static bool Transcode(InputStream& is, OutputStream& os) { + unsigned codepoint; + if (!SourceEncoding::Decode(is, &codepoint)) + return false; + TargetEncoding::Encode(os, codepoint); + return true; + } + + template + RAPIDJSON_FORCEINLINE static bool TranscodeUnsafe(InputStream& is, OutputStream& os) { + unsigned codepoint; + if (!SourceEncoding::Decode(is, &codepoint)) + return false; + TargetEncoding::EncodeUnsafe(os, codepoint); + return true; + } + + //! Validate one Unicode codepoint from an encoded stream. + template + RAPIDJSON_FORCEINLINE static bool Validate(InputStream& is, OutputStream& os) { + return Transcode(is, os); // Since source/target encoding is different, must transcode. + } +}; + +// Forward declaration. +template +inline void PutUnsafe(Stream& stream, typename Stream::Ch c); + +//! Specialization of Transcoder with same source and target encoding. +template +struct Transcoder { + template + RAPIDJSON_FORCEINLINE static bool Transcode(InputStream& is, OutputStream& os) { + os.Put(is.Take()); // Just copy one code unit. This semantic is different from primary template class. + return true; + } + + template + RAPIDJSON_FORCEINLINE static bool TranscodeUnsafe(InputStream& is, OutputStream& os) { + PutUnsafe(os, is.Take()); // Just copy one code unit. This semantic is different from primary template class. 
+ return true; + } + + template + RAPIDJSON_FORCEINLINE static bool Validate(InputStream& is, OutputStream& os) { + return Encoding::Validate(is, os); // source/target encoding are the same + } +}; + +RAPIDJSON_NAMESPACE_END + +#if defined(__GNUC__) || defined(_MSC_VER) +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_ENCODINGS_H_ diff --git a/include/rapidjson/error/en.h b/include/rapidjson/error/en.h new file mode 100644 index 0000000..2db838b --- /dev/null +++ b/include/rapidjson/error/en.h @@ -0,0 +1,74 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_ERROR_EN_H_ +#define RAPIDJSON_ERROR_EN_H_ + +#include "error.h" + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(switch-enum) +RAPIDJSON_DIAG_OFF(covered-switch-default) +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +//! Maps error code of parsing into error message. +/*! + \ingroup RAPIDJSON_ERRORS + \param parseErrorCode Error code obtained in parsing. + \return the error message. + \note User can make a copy of this function for localization. + Using switch-case is safer for future modification of error codes. 
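
For reference, a typical (illustrative, not from the commit) use of this function together with ParseResult from error.h and the FileReadStream added further down in this diff; "example.json" is just a placeholder path:

    #include <cstdio>
    #include <rapidjson/document.h>        // Document, from the same vendored tree
    #include <rapidjson/error/en.h>        // GetParseError_En
    #include <rapidjson/filereadstream.h>  // FileReadStream

    int main() {
        std::FILE* fp = std::fopen("example.json", "rb");
        if (!fp) return 1;

        char buffer[4096];
        rapidjson::FileReadStream is(fp, buffer, sizeof(buffer));

        rapidjson::Document doc;
        rapidjson::ParseResult ok = doc.ParseStream(is);   // Document converts to ParseResult
        std::fclose(fp);

        if (!ok) {
            std::fprintf(stderr, "JSON parse error: %s (at offset %zu)\n",
                         rapidjson::GetParseError_En(ok.Code()), ok.Offset());
            return 1;
        }
        return 0;
    }
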
+*/ +inline const RAPIDJSON_ERROR_CHARTYPE* GetParseError_En(ParseErrorCode parseErrorCode) { + switch (parseErrorCode) { + case kParseErrorNone: return RAPIDJSON_ERROR_STRING("No error."); + + case kParseErrorDocumentEmpty: return RAPIDJSON_ERROR_STRING("The document is empty."); + case kParseErrorDocumentRootNotSingular: return RAPIDJSON_ERROR_STRING("The document root must not be followed by other values."); + + case kParseErrorValueInvalid: return RAPIDJSON_ERROR_STRING("Invalid value."); + + case kParseErrorObjectMissName: return RAPIDJSON_ERROR_STRING("Missing a name for object member."); + case kParseErrorObjectMissColon: return RAPIDJSON_ERROR_STRING("Missing a colon after a name of object member."); + case kParseErrorObjectMissCommaOrCurlyBracket: return RAPIDJSON_ERROR_STRING("Missing a comma or '}' after an object member."); + + case kParseErrorArrayMissCommaOrSquareBracket: return RAPIDJSON_ERROR_STRING("Missing a comma or ']' after an array element."); + + case kParseErrorStringUnicodeEscapeInvalidHex: return RAPIDJSON_ERROR_STRING("Incorrect hex digit after \\u escape in string."); + case kParseErrorStringUnicodeSurrogateInvalid: return RAPIDJSON_ERROR_STRING("The surrogate pair in string is invalid."); + case kParseErrorStringEscapeInvalid: return RAPIDJSON_ERROR_STRING("Invalid escape character in string."); + case kParseErrorStringMissQuotationMark: return RAPIDJSON_ERROR_STRING("Missing a closing quotation mark in string."); + case kParseErrorStringInvalidEncoding: return RAPIDJSON_ERROR_STRING("Invalid encoding in string."); + + case kParseErrorNumberTooBig: return RAPIDJSON_ERROR_STRING("Number too big to be stored in double."); + case kParseErrorNumberMissFraction: return RAPIDJSON_ERROR_STRING("Miss fraction part in number."); + case kParseErrorNumberMissExponent: return RAPIDJSON_ERROR_STRING("Miss exponent in number."); + + case kParseErrorTermination: return RAPIDJSON_ERROR_STRING("Terminate parsing due to Handler error."); + case kParseErrorUnspecificSyntaxError: return RAPIDJSON_ERROR_STRING("Unspecific syntax error."); + + default: return RAPIDJSON_ERROR_STRING("Unknown error."); + } +} + +RAPIDJSON_NAMESPACE_END + +#ifdef __clang__ +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_ERROR_EN_H_ diff --git a/include/rapidjson/error/error.h b/include/rapidjson/error/error.h new file mode 100644 index 0000000..95cb31a --- /dev/null +++ b/include/rapidjson/error/error.h @@ -0,0 +1,155 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_ERROR_ERROR_H_ +#define RAPIDJSON_ERROR_ERROR_H_ + +#include "../rapidjson.h" + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(padded) +#endif + +/*! \file error.h */ + +/*! 
\defgroup RAPIDJSON_ERRORS RapidJSON error handling */ + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_ERROR_CHARTYPE + +//! Character type of error messages. +/*! \ingroup RAPIDJSON_ERRORS + The default character type is \c char. + On Windows, user can define this macro as \c TCHAR for supporting both + unicode/non-unicode settings. +*/ +#ifndef RAPIDJSON_ERROR_CHARTYPE +#define RAPIDJSON_ERROR_CHARTYPE char +#endif + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_ERROR_STRING + +//! Macro for converting string literial to \ref RAPIDJSON_ERROR_CHARTYPE[]. +/*! \ingroup RAPIDJSON_ERRORS + By default this conversion macro does nothing. + On Windows, user can define this macro as \c _T(x) for supporting both + unicode/non-unicode settings. +*/ +#ifndef RAPIDJSON_ERROR_STRING +#define RAPIDJSON_ERROR_STRING(x) x +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +/////////////////////////////////////////////////////////////////////////////// +// ParseErrorCode + +//! Error code of parsing. +/*! \ingroup RAPIDJSON_ERRORS + \see GenericReader::Parse, GenericReader::GetParseErrorCode +*/ +enum ParseErrorCode { + kParseErrorNone = 0, //!< No error. + + kParseErrorDocumentEmpty, //!< The document is empty. + kParseErrorDocumentRootNotSingular, //!< The document root must not follow by other values. + + kParseErrorValueInvalid, //!< Invalid value. + + kParseErrorObjectMissName, //!< Missing a name for object member. + kParseErrorObjectMissColon, //!< Missing a colon after a name of object member. + kParseErrorObjectMissCommaOrCurlyBracket, //!< Missing a comma or '}' after an object member. + + kParseErrorArrayMissCommaOrSquareBracket, //!< Missing a comma or ']' after an array element. + + kParseErrorStringUnicodeEscapeInvalidHex, //!< Incorrect hex digit after \\u escape in string. + kParseErrorStringUnicodeSurrogateInvalid, //!< The surrogate pair in string is invalid. + kParseErrorStringEscapeInvalid, //!< Invalid escape character in string. + kParseErrorStringMissQuotationMark, //!< Missing a closing quotation mark in string. + kParseErrorStringInvalidEncoding, //!< Invalid encoding in string. + + kParseErrorNumberTooBig, //!< Number too big to be stored in double. + kParseErrorNumberMissFraction, //!< Miss fraction part in number. + kParseErrorNumberMissExponent, //!< Miss exponent in number. + + kParseErrorTermination, //!< Parsing was terminated. + kParseErrorUnspecificSyntaxError //!< Unspecific syntax error. +}; + +//! Result of parsing (wraps ParseErrorCode) +/*! + \ingroup RAPIDJSON_ERRORS + \code + Document doc; + ParseResult ok = doc.Parse("[42]"); + if (!ok) { + fprintf(stderr, "JSON parse error: %s (%u)", + GetParseError_En(ok.Code()), ok.Offset()); + exit(EXIT_FAILURE); + } + \endcode + \see GenericReader::Parse, GenericDocument::Parse +*/ +struct ParseResult { +public: + //! Default constructor, no error. + ParseResult() : code_(kParseErrorNone), offset_(0) {} + //! Constructor to set an error. + ParseResult(ParseErrorCode code, size_t offset) : code_(code), offset_(offset) {} + + //! Get the error code. + ParseErrorCode Code() const { return code_; } + //! Get the error offset, if \ref IsError(), 0 otherwise. + size_t Offset() const { return offset_; } + + //! Conversion to \c bool, returns \c true, iff !\ref IsError(). + operator bool() const { return !IsError(); } + //! Whether the result is an error. 
+ bool IsError() const { return code_ != kParseErrorNone; } + + bool operator==(const ParseResult& that) const { return code_ == that.code_; } + bool operator==(ParseErrorCode code) const { return code_ == code; } + friend bool operator==(ParseErrorCode code, const ParseResult & err) { return code == err.code_; } + + //! Reset error code. + void Clear() { Set(kParseErrorNone); } + //! Update error code and offset. + void Set(ParseErrorCode code, size_t offset = 0) { code_ = code; offset_ = offset; } + +private: + ParseErrorCode code_; + size_t offset_; +}; + +//! Function pointer type of GetParseError(). +/*! \ingroup RAPIDJSON_ERRORS + + This is the prototype for \c GetParseError_X(), where \c X is a locale. + User can dynamically change locale in runtime, e.g.: +\code + GetParseErrorFunc GetParseError = GetParseError_En; // or whatever + const RAPIDJSON_ERROR_CHARTYPE* s = GetParseError(document.GetParseErrorCode()); +\endcode +*/ +typedef const RAPIDJSON_ERROR_CHARTYPE* (*GetParseErrorFunc)(ParseErrorCode); + +RAPIDJSON_NAMESPACE_END + +#ifdef __clang__ +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_ERROR_ERROR_H_ diff --git a/include/rapidjson/filereadstream.h b/include/rapidjson/filereadstream.h new file mode 100644 index 0000000..b56ea13 --- /dev/null +++ b/include/rapidjson/filereadstream.h @@ -0,0 +1,99 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_FILEREADSTREAM_H_ +#define RAPIDJSON_FILEREADSTREAM_H_ + +#include "stream.h" +#include + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(padded) +RAPIDJSON_DIAG_OFF(unreachable-code) +RAPIDJSON_DIAG_OFF(missing-noreturn) +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +//! File byte stream for input using fread(). +/*! + \note implements Stream concept +*/ +class FileReadStream { +public: + typedef char Ch; //!< Character type (byte). + + //! Constructor. + /*! + \param fp File pointer opened for read. + \param buffer user-supplied buffer. + \param bufferSize size of buffer in bytes. Must >=4 bytes. + */ + FileReadStream(std::FILE* fp, char* buffer, size_t bufferSize) : fp_(fp), buffer_(buffer), bufferSize_(bufferSize), bufferLast_(0), current_(buffer_), readCount_(0), count_(0), eof_(false) { + RAPIDJSON_ASSERT(fp_ != 0); + RAPIDJSON_ASSERT(bufferSize >= 4); + Read(); + } + + Ch Peek() const { return *current_; } + Ch Take() { Ch c = *current_; Read(); return c; } + size_t Tell() const { return count_ + static_cast(current_ - buffer_); } + + // Not implemented + void Put(Ch) { RAPIDJSON_ASSERT(false); } + void Flush() { RAPIDJSON_ASSERT(false); } + Ch* PutBegin() { RAPIDJSON_ASSERT(false); return 0; } + size_t PutEnd(Ch*) { RAPIDJSON_ASSERT(false); return 0; } + + // For encoding detection only. + const Ch* Peek4() const { + return (current_ + 4 <= bufferLast_) ? 
current_ : 0; + } + +private: + void Read() { + if (current_ < bufferLast_) + ++current_; + else if (!eof_) { + count_ += readCount_; + readCount_ = fread(buffer_, 1, bufferSize_, fp_); + bufferLast_ = buffer_ + readCount_ - 1; + current_ = buffer_; + + if (readCount_ < bufferSize_) { + buffer_[readCount_] = '\0'; + ++bufferLast_; + eof_ = true; + } + } + } + + std::FILE* fp_; + Ch *buffer_; + size_t bufferSize_; + Ch *bufferLast_; + Ch *current_; + size_t readCount_; + size_t count_; //!< Number of characters read + bool eof_; +}; + +RAPIDJSON_NAMESPACE_END + +#ifdef __clang__ +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_FILESTREAM_H_ diff --git a/include/rapidjson/filewritestream.h b/include/rapidjson/filewritestream.h new file mode 100644 index 0000000..6378dd6 --- /dev/null +++ b/include/rapidjson/filewritestream.h @@ -0,0 +1,104 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_FILEWRITESTREAM_H_ +#define RAPIDJSON_FILEWRITESTREAM_H_ + +#include "stream.h" +#include + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(unreachable-code) +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +//! Wrapper of C file stream for input using fread(). +/*! + \note implements Stream concept +*/ +class FileWriteStream { +public: + typedef char Ch; //!< Character type. Only support char. + + FileWriteStream(std::FILE* fp, char* buffer, size_t bufferSize) : fp_(fp), buffer_(buffer), bufferEnd_(buffer + bufferSize), current_(buffer_) { + RAPIDJSON_ASSERT(fp_ != 0); + } + + void Put(char c) { + if (current_ >= bufferEnd_) + Flush(); + + *current_++ = c; + } + + void PutN(char c, size_t n) { + size_t avail = static_cast(bufferEnd_ - current_); + while (n > avail) { + std::memset(current_, c, avail); + current_ += avail; + Flush(); + n -= avail; + avail = static_cast(bufferEnd_ - current_); + } + + if (n > 0) { + std::memset(current_, c, n); + current_ += n; + } + } + + void Flush() { + if (current_ != buffer_) { + size_t result = fwrite(buffer_, 1, static_cast(current_ - buffer_), fp_); + if (result < static_cast(current_ - buffer_)) { + // failure deliberately ignored at this time + // added to avoid warn_unused_result build errors + } + current_ = buffer_; + } + } + + // Not implemented + char Peek() const { RAPIDJSON_ASSERT(false); return 0; } + char Take() { RAPIDJSON_ASSERT(false); return 0; } + size_t Tell() const { RAPIDJSON_ASSERT(false); return 0; } + char* PutBegin() { RAPIDJSON_ASSERT(false); return 0; } + size_t PutEnd(char*) { RAPIDJSON_ASSERT(false); return 0; } + +private: + // Prohibit copy constructor & assignment operator. + FileWriteStream(const FileWriteStream&); + FileWriteStream& operator=(const FileWriteStream&); + + std::FILE* fp_; + char *buffer_; + char *bufferEnd_; + char *current_; +}; + +//! Implement specialized version of PutN() with memset() for better performance. 
+template<> +inline void PutN(FileWriteStream& stream, char c, size_t n) { + stream.PutN(c, n); +} + +RAPIDJSON_NAMESPACE_END + +#ifdef __clang__ +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_FILESTREAM_H_ diff --git a/include/rapidjson/fwd.h b/include/rapidjson/fwd.h new file mode 100644 index 0000000..e8104e8 --- /dev/null +++ b/include/rapidjson/fwd.h @@ -0,0 +1,151 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_FWD_H_ +#define RAPIDJSON_FWD_H_ + +#include "rapidjson.h" + +RAPIDJSON_NAMESPACE_BEGIN + +// encodings.h + +template struct UTF8; +template struct UTF16; +template struct UTF16BE; +template struct UTF16LE; +template struct UTF32; +template struct UTF32BE; +template struct UTF32LE; +template struct ASCII; +template struct AutoUTF; + +template +struct Transcoder; + +// allocators.h + +class CrtAllocator; + +template +class MemoryPoolAllocator; + +// stream.h + +template +struct GenericStringStream; + +typedef GenericStringStream > StringStream; + +template +struct GenericInsituStringStream; + +typedef GenericInsituStringStream > InsituStringStream; + +// stringbuffer.h + +template +class GenericStringBuffer; + +typedef GenericStringBuffer, CrtAllocator> StringBuffer; + +// filereadstream.h + +class FileReadStream; + +// filewritestream.h + +class FileWriteStream; + +// memorybuffer.h + +template +struct GenericMemoryBuffer; + +typedef GenericMemoryBuffer MemoryBuffer; + +// memorystream.h + +struct MemoryStream; + +// reader.h + +template +struct BaseReaderHandler; + +template +class GenericReader; + +typedef GenericReader, UTF8, CrtAllocator> Reader; + +// writer.h + +template +class Writer; + +// prettywriter.h + +template +class PrettyWriter; + +// document.h + +template +struct GenericMember; + +template +class GenericMemberIterator; + +template +struct GenericStringRef; + +template +class GenericValue; + +typedef GenericValue, MemoryPoolAllocator > Value; + +template +class GenericDocument; + +typedef GenericDocument, MemoryPoolAllocator, CrtAllocator> Document; + +// pointer.h + +template +class GenericPointer; + +typedef GenericPointer Pointer; + +// schema.h + +template +class IGenericRemoteSchemaDocumentProvider; + +template +class GenericSchemaDocument; + +typedef GenericSchemaDocument SchemaDocument; +typedef IGenericRemoteSchemaDocumentProvider IRemoteSchemaDocumentProvider; + +template < + typename SchemaDocumentType, + typename OutputHandler, + typename StateAllocator> +class GenericSchemaValidator; + +typedef GenericSchemaValidator, void>, CrtAllocator> SchemaValidator; + +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_RAPIDJSONFWD_H_ diff --git a/include/rapidjson/internal/biginteger.h b/include/rapidjson/internal/biginteger.h new file mode 100644 index 0000000..9d3e88c --- /dev/null +++ b/include/rapidjson/internal/biginteger.h @@ -0,0 +1,290 @@ +// Tencent is pleased to 
support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_BIGINTEGER_H_ +#define RAPIDJSON_BIGINTEGER_H_ + +#include "../rapidjson.h" + +#if defined(_MSC_VER) && defined(_M_AMD64) +#include // for _umul128 +#pragma intrinsic(_umul128) +#endif + +RAPIDJSON_NAMESPACE_BEGIN +namespace internal { + +class BigInteger { +public: + typedef uint64_t Type; + + BigInteger(const BigInteger& rhs) : count_(rhs.count_) { + std::memcpy(digits_, rhs.digits_, count_ * sizeof(Type)); + } + + explicit BigInteger(uint64_t u) : count_(1) { + digits_[0] = u; + } + + BigInteger(const char* decimals, size_t length) : count_(1) { + RAPIDJSON_ASSERT(length > 0); + digits_[0] = 0; + size_t i = 0; + const size_t kMaxDigitPerIteration = 19; // 2^64 = 18446744073709551616 > 10^19 + while (length >= kMaxDigitPerIteration) { + AppendDecimal64(decimals + i, decimals + i + kMaxDigitPerIteration); + length -= kMaxDigitPerIteration; + i += kMaxDigitPerIteration; + } + + if (length > 0) + AppendDecimal64(decimals + i, decimals + i + length); + } + + BigInteger& operator=(const BigInteger &rhs) + { + if (this != &rhs) { + count_ = rhs.count_; + std::memcpy(digits_, rhs.digits_, count_ * sizeof(Type)); + } + return *this; + } + + BigInteger& operator=(uint64_t u) { + digits_[0] = u; + count_ = 1; + return *this; + } + + BigInteger& operator+=(uint64_t u) { + Type backup = digits_[0]; + digits_[0] += u; + for (size_t i = 0; i < count_ - 1; i++) { + if (digits_[i] >= backup) + return *this; // no carry + backup = digits_[i + 1]; + digits_[i + 1] += 1; + } + + // Last carry + if (digits_[count_ - 1] < backup) + PushBack(1); + + return *this; + } + + BigInteger& operator*=(uint64_t u) { + if (u == 0) return *this = 0; + if (u == 1) return *this; + if (*this == 1) return *this = u; + + uint64_t k = 0; + for (size_t i = 0; i < count_; i++) { + uint64_t hi; + digits_[i] = MulAdd64(digits_[i], u, k, &hi); + k = hi; + } + + if (k > 0) + PushBack(k); + + return *this; + } + + BigInteger& operator*=(uint32_t u) { + if (u == 0) return *this = 0; + if (u == 1) return *this; + if (*this == 1) return *this = u; + + uint64_t k = 0; + for (size_t i = 0; i < count_; i++) { + const uint64_t c = digits_[i] >> 32; + const uint64_t d = digits_[i] & 0xFFFFFFFF; + const uint64_t uc = u * c; + const uint64_t ud = u * d; + const uint64_t p0 = ud + k; + const uint64_t p1 = uc + (p0 >> 32); + digits_[i] = (p0 & 0xFFFFFFFF) | (p1 << 32); + k = p1 >> 32; + } + + if (k > 0) + PushBack(k); + + return *this; + } + + BigInteger& operator<<=(size_t shift) { + if (IsZero() || shift == 0) return *this; + + size_t offset = shift / kTypeBit; + size_t interShift = shift % kTypeBit; + RAPIDJSON_ASSERT(count_ + offset <= kCapacity); + + if (interShift == 0) { + std::memmove(&digits_[count_ - 1 + offset], &digits_[count_ - 1], count_ * sizeof(Type)); + count_ += offset; + } + else { + digits_[count_] = 0; + for (size_t i = 
count_; i > 0; i--) + digits_[i + offset] = (digits_[i] << interShift) | (digits_[i - 1] >> (kTypeBit - interShift)); + digits_[offset] = digits_[0] << interShift; + count_ += offset; + if (digits_[count_]) + count_++; + } + + std::memset(digits_, 0, offset * sizeof(Type)); + + return *this; + } + + bool operator==(const BigInteger& rhs) const { + return count_ == rhs.count_ && std::memcmp(digits_, rhs.digits_, count_ * sizeof(Type)) == 0; + } + + bool operator==(const Type rhs) const { + return count_ == 1 && digits_[0] == rhs; + } + + BigInteger& MultiplyPow5(unsigned exp) { + static const uint32_t kPow5[12] = { + 5, + 5 * 5, + 5 * 5 * 5, + 5 * 5 * 5 * 5, + 5 * 5 * 5 * 5 * 5, + 5 * 5 * 5 * 5 * 5 * 5, + 5 * 5 * 5 * 5 * 5 * 5 * 5, + 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5, + 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5, + 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5, + 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5, + 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 + }; + if (exp == 0) return *this; + for (; exp >= 27; exp -= 27) *this *= RAPIDJSON_UINT64_C2(0X6765C793, 0XFA10079D); // 5^27 + for (; exp >= 13; exp -= 13) *this *= static_cast(1220703125u); // 5^13 + if (exp > 0) *this *= kPow5[exp - 1]; + return *this; + } + + // Compute absolute difference of this and rhs. + // Assume this != rhs + bool Difference(const BigInteger& rhs, BigInteger* out) const { + int cmp = Compare(rhs); + RAPIDJSON_ASSERT(cmp != 0); + const BigInteger *a, *b; // Makes a > b + bool ret; + if (cmp < 0) { a = &rhs; b = this; ret = true; } + else { a = this; b = &rhs; ret = false; } + + Type borrow = 0; + for (size_t i = 0; i < a->count_; i++) { + Type d = a->digits_[i] - borrow; + if (i < b->count_) + d -= b->digits_[i]; + borrow = (d > a->digits_[i]) ? 1 : 0; + out->digits_[i] = d; + if (d != 0) + out->count_ = i + 1; + } + + return ret; + } + + int Compare(const BigInteger& rhs) const { + if (count_ != rhs.count_) + return count_ < rhs.count_ ? -1 : 1; + + for (size_t i = count_; i-- > 0;) + if (digits_[i] != rhs.digits_[i]) + return digits_[i] < rhs.digits_[i] ? 
-1 : 1; + + return 0; + } + + size_t GetCount() const { return count_; } + Type GetDigit(size_t index) const { RAPIDJSON_ASSERT(index < count_); return digits_[index]; } + bool IsZero() const { return count_ == 1 && digits_[0] == 0; } + +private: + void AppendDecimal64(const char* begin, const char* end) { + uint64_t u = ParseUint64(begin, end); + if (IsZero()) + *this = u; + else { + unsigned exp = static_cast(end - begin); + (MultiplyPow5(exp) <<= exp) += u; // *this = *this * 10^exp + u + } + } + + void PushBack(Type digit) { + RAPIDJSON_ASSERT(count_ < kCapacity); + digits_[count_++] = digit; + } + + static uint64_t ParseUint64(const char* begin, const char* end) { + uint64_t r = 0; + for (const char* p = begin; p != end; ++p) { + RAPIDJSON_ASSERT(*p >= '0' && *p <= '9'); + r = r * 10u + static_cast(*p - '0'); + } + return r; + } + + // Assume a * b + k < 2^128 + static uint64_t MulAdd64(uint64_t a, uint64_t b, uint64_t k, uint64_t* outHigh) { +#if defined(_MSC_VER) && defined(_M_AMD64) + uint64_t low = _umul128(a, b, outHigh) + k; + if (low < k) + (*outHigh)++; + return low; +#elif (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) && defined(__x86_64__) + __extension__ typedef unsigned __int128 uint128; + uint128 p = static_cast(a) * static_cast(b); + p += k; + *outHigh = static_cast(p >> 64); + return static_cast(p); +#else + const uint64_t a0 = a & 0xFFFFFFFF, a1 = a >> 32, b0 = b & 0xFFFFFFFF, b1 = b >> 32; + uint64_t x0 = a0 * b0, x1 = a0 * b1, x2 = a1 * b0, x3 = a1 * b1; + x1 += (x0 >> 32); // can't give carry + x1 += x2; + if (x1 < x2) + x3 += (static_cast(1) << 32); + uint64_t lo = (x1 << 32) + (x0 & 0xFFFFFFFF); + uint64_t hi = x3 + (x1 >> 32); + + lo += k; + if (lo < k) + hi++; + *outHigh = hi; + return lo; +#endif + } + + static const size_t kBitCount = 3328; // 64bit * 54 > 10^1000 + static const size_t kCapacity = kBitCount / sizeof(Type); + static const size_t kTypeBit = sizeof(Type) * 8; + + Type digits_[kCapacity]; + size_t count_; +}; + +} // namespace internal +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_BIGINTEGER_H_ diff --git a/include/rapidjson/internal/diyfp.h b/include/rapidjson/internal/diyfp.h new file mode 100644 index 0000000..c9fefdc --- /dev/null +++ b/include/rapidjson/internal/diyfp.h @@ -0,0 +1,258 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +// This is a C++ header-only implementation of Grisu2 algorithm from the publication: +// Loitsch, Florian. "Printing floating-point numbers quickly and accurately with +// integers." ACM Sigplan Notices 45.6 (2010): 233-243. 
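
As a quick illustration (not part of the commit) of what this Grisu2 machinery is for: the internal dtoa() entry point defined in internal/dtoa.h further down in this diff emits a short decimal form that round-trips back to the same double:

    #include <cstdio>
    #include <rapidjson/internal/dtoa.h>

    int main() {
        char buf[32];
        // Grisu2 picks "0.1" rather than the 17-digit form 0.1000000000000000055...
        char* end = rapidjson::internal::dtoa(0.1, buf);
        *end = '\0';                 // dtoa() returns the end pointer; it does not null-terminate
        std::printf("%s\n", buf);    // prints: 0.1
        return 0;
    }
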
+ +#ifndef RAPIDJSON_DIYFP_H_ +#define RAPIDJSON_DIYFP_H_ + +#include "../rapidjson.h" + +#if defined(_MSC_VER) && defined(_M_AMD64) +#include +#pragma intrinsic(_BitScanReverse64) +#pragma intrinsic(_umul128) +#endif + +RAPIDJSON_NAMESPACE_BEGIN +namespace internal { + +#ifdef __GNUC__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(effc++) +#endif + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(padded) +#endif + +struct DiyFp { + DiyFp() : f(), e() {} + + DiyFp(uint64_t fp, int exp) : f(fp), e(exp) {} + + explicit DiyFp(double d) { + union { + double d; + uint64_t u64; + } u = { d }; + + int biased_e = static_cast((u.u64 & kDpExponentMask) >> kDpSignificandSize); + uint64_t significand = (u.u64 & kDpSignificandMask); + if (biased_e != 0) { + f = significand + kDpHiddenBit; + e = biased_e - kDpExponentBias; + } + else { + f = significand; + e = kDpMinExponent + 1; + } + } + + DiyFp operator-(const DiyFp& rhs) const { + return DiyFp(f - rhs.f, e); + } + + DiyFp operator*(const DiyFp& rhs) const { +#if defined(_MSC_VER) && defined(_M_AMD64) + uint64_t h; + uint64_t l = _umul128(f, rhs.f, &h); + if (l & (uint64_t(1) << 63)) // rounding + h++; + return DiyFp(h, e + rhs.e + 64); +#elif (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) && defined(__x86_64__) + __extension__ typedef unsigned __int128 uint128; + uint128 p = static_cast(f) * static_cast(rhs.f); + uint64_t h = static_cast(p >> 64); + uint64_t l = static_cast(p); + if (l & (uint64_t(1) << 63)) // rounding + h++; + return DiyFp(h, e + rhs.e + 64); +#else + const uint64_t M32 = 0xFFFFFFFF; + const uint64_t a = f >> 32; + const uint64_t b = f & M32; + const uint64_t c = rhs.f >> 32; + const uint64_t d = rhs.f & M32; + const uint64_t ac = a * c; + const uint64_t bc = b * c; + const uint64_t ad = a * d; + const uint64_t bd = b * d; + uint64_t tmp = (bd >> 32) + (ad & M32) + (bc & M32); + tmp += 1U << 31; /// mult_round + return DiyFp(ac + (ad >> 32) + (bc >> 32) + (tmp >> 32), e + rhs.e + 64); +#endif + } + + DiyFp Normalize() const { +#if defined(_MSC_VER) && defined(_M_AMD64) + unsigned long index; + _BitScanReverse64(&index, f); + return DiyFp(f << (63 - index), e - (63 - index)); +#elif defined(__GNUC__) && __GNUC__ >= 4 + int s = __builtin_clzll(f); + return DiyFp(f << s, e - s); +#else + DiyFp res = *this; + while (!(res.f & (static_cast(1) << 63))) { + res.f <<= 1; + res.e--; + } + return res; +#endif + } + + DiyFp NormalizeBoundary() const { + DiyFp res = *this; + while (!(res.f & (kDpHiddenBit << 1))) { + res.f <<= 1; + res.e--; + } + res.f <<= (kDiySignificandSize - kDpSignificandSize - 2); + res.e = res.e - (kDiySignificandSize - kDpSignificandSize - 2); + return res; + } + + void NormalizedBoundaries(DiyFp* minus, DiyFp* plus) const { + DiyFp pl = DiyFp((f << 1) + 1, e - 1).NormalizeBoundary(); + DiyFp mi = (f == kDpHiddenBit) ? DiyFp((f << 2) - 1, e - 2) : DiyFp((f << 1) - 1, e - 1); + mi.f <<= mi.e - pl.e; + mi.e = pl.e; + *plus = pl; + *minus = mi; + } + + double ToDouble() const { + union { + double d; + uint64_t u64; + }u; + const uint64_t be = (e == kDpDenormalExponent && (f & kDpHiddenBit) == 0) ? 
0 : + static_cast(e + kDpExponentBias); + u.u64 = (f & kDpSignificandMask) | (be << kDpSignificandSize); + return u.d; + } + + static const int kDiySignificandSize = 64; + static const int kDpSignificandSize = 52; + static const int kDpExponentBias = 0x3FF + kDpSignificandSize; + static const int kDpMaxExponent = 0x7FF - kDpExponentBias; + static const int kDpMinExponent = -kDpExponentBias; + static const int kDpDenormalExponent = -kDpExponentBias + 1; + static const uint64_t kDpExponentMask = RAPIDJSON_UINT64_C2(0x7FF00000, 0x00000000); + static const uint64_t kDpSignificandMask = RAPIDJSON_UINT64_C2(0x000FFFFF, 0xFFFFFFFF); + static const uint64_t kDpHiddenBit = RAPIDJSON_UINT64_C2(0x00100000, 0x00000000); + + uint64_t f; + int e; +}; + +inline DiyFp GetCachedPowerByIndex(size_t index) { + // 10^-348, 10^-340, ..., 10^340 + static const uint64_t kCachedPowers_F[] = { + RAPIDJSON_UINT64_C2(0xfa8fd5a0, 0x081c0288), RAPIDJSON_UINT64_C2(0xbaaee17f, 0xa23ebf76), + RAPIDJSON_UINT64_C2(0x8b16fb20, 0x3055ac76), RAPIDJSON_UINT64_C2(0xcf42894a, 0x5dce35ea), + RAPIDJSON_UINT64_C2(0x9a6bb0aa, 0x55653b2d), RAPIDJSON_UINT64_C2(0xe61acf03, 0x3d1a45df), + RAPIDJSON_UINT64_C2(0xab70fe17, 0xc79ac6ca), RAPIDJSON_UINT64_C2(0xff77b1fc, 0xbebcdc4f), + RAPIDJSON_UINT64_C2(0xbe5691ef, 0x416bd60c), RAPIDJSON_UINT64_C2(0x8dd01fad, 0x907ffc3c), + RAPIDJSON_UINT64_C2(0xd3515c28, 0x31559a83), RAPIDJSON_UINT64_C2(0x9d71ac8f, 0xada6c9b5), + RAPIDJSON_UINT64_C2(0xea9c2277, 0x23ee8bcb), RAPIDJSON_UINT64_C2(0xaecc4991, 0x4078536d), + RAPIDJSON_UINT64_C2(0x823c1279, 0x5db6ce57), RAPIDJSON_UINT64_C2(0xc2109436, 0x4dfb5637), + RAPIDJSON_UINT64_C2(0x9096ea6f, 0x3848984f), RAPIDJSON_UINT64_C2(0xd77485cb, 0x25823ac7), + RAPIDJSON_UINT64_C2(0xa086cfcd, 0x97bf97f4), RAPIDJSON_UINT64_C2(0xef340a98, 0x172aace5), + RAPIDJSON_UINT64_C2(0xb23867fb, 0x2a35b28e), RAPIDJSON_UINT64_C2(0x84c8d4df, 0xd2c63f3b), + RAPIDJSON_UINT64_C2(0xc5dd4427, 0x1ad3cdba), RAPIDJSON_UINT64_C2(0x936b9fce, 0xbb25c996), + RAPIDJSON_UINT64_C2(0xdbac6c24, 0x7d62a584), RAPIDJSON_UINT64_C2(0xa3ab6658, 0x0d5fdaf6), + RAPIDJSON_UINT64_C2(0xf3e2f893, 0xdec3f126), RAPIDJSON_UINT64_C2(0xb5b5ada8, 0xaaff80b8), + RAPIDJSON_UINT64_C2(0x87625f05, 0x6c7c4a8b), RAPIDJSON_UINT64_C2(0xc9bcff60, 0x34c13053), + RAPIDJSON_UINT64_C2(0x964e858c, 0x91ba2655), RAPIDJSON_UINT64_C2(0xdff97724, 0x70297ebd), + RAPIDJSON_UINT64_C2(0xa6dfbd9f, 0xb8e5b88f), RAPIDJSON_UINT64_C2(0xf8a95fcf, 0x88747d94), + RAPIDJSON_UINT64_C2(0xb9447093, 0x8fa89bcf), RAPIDJSON_UINT64_C2(0x8a08f0f8, 0xbf0f156b), + RAPIDJSON_UINT64_C2(0xcdb02555, 0x653131b6), RAPIDJSON_UINT64_C2(0x993fe2c6, 0xd07b7fac), + RAPIDJSON_UINT64_C2(0xe45c10c4, 0x2a2b3b06), RAPIDJSON_UINT64_C2(0xaa242499, 0x697392d3), + RAPIDJSON_UINT64_C2(0xfd87b5f2, 0x8300ca0e), RAPIDJSON_UINT64_C2(0xbce50864, 0x92111aeb), + RAPIDJSON_UINT64_C2(0x8cbccc09, 0x6f5088cc), RAPIDJSON_UINT64_C2(0xd1b71758, 0xe219652c), + RAPIDJSON_UINT64_C2(0x9c400000, 0x00000000), RAPIDJSON_UINT64_C2(0xe8d4a510, 0x00000000), + RAPIDJSON_UINT64_C2(0xad78ebc5, 0xac620000), RAPIDJSON_UINT64_C2(0x813f3978, 0xf8940984), + RAPIDJSON_UINT64_C2(0xc097ce7b, 0xc90715b3), RAPIDJSON_UINT64_C2(0x8f7e32ce, 0x7bea5c70), + RAPIDJSON_UINT64_C2(0xd5d238a4, 0xabe98068), RAPIDJSON_UINT64_C2(0x9f4f2726, 0x179a2245), + RAPIDJSON_UINT64_C2(0xed63a231, 0xd4c4fb27), RAPIDJSON_UINT64_C2(0xb0de6538, 0x8cc8ada8), + RAPIDJSON_UINT64_C2(0x83c7088e, 0x1aab65db), RAPIDJSON_UINT64_C2(0xc45d1df9, 0x42711d9a), + RAPIDJSON_UINT64_C2(0x924d692c, 0xa61be758), RAPIDJSON_UINT64_C2(0xda01ee64, 0x1a708dea), + 
RAPIDJSON_UINT64_C2(0xa26da399, 0x9aef774a), RAPIDJSON_UINT64_C2(0xf209787b, 0xb47d6b85), + RAPIDJSON_UINT64_C2(0xb454e4a1, 0x79dd1877), RAPIDJSON_UINT64_C2(0x865b8692, 0x5b9bc5c2), + RAPIDJSON_UINT64_C2(0xc83553c5, 0xc8965d3d), RAPIDJSON_UINT64_C2(0x952ab45c, 0xfa97a0b3), + RAPIDJSON_UINT64_C2(0xde469fbd, 0x99a05fe3), RAPIDJSON_UINT64_C2(0xa59bc234, 0xdb398c25), + RAPIDJSON_UINT64_C2(0xf6c69a72, 0xa3989f5c), RAPIDJSON_UINT64_C2(0xb7dcbf53, 0x54e9bece), + RAPIDJSON_UINT64_C2(0x88fcf317, 0xf22241e2), RAPIDJSON_UINT64_C2(0xcc20ce9b, 0xd35c78a5), + RAPIDJSON_UINT64_C2(0x98165af3, 0x7b2153df), RAPIDJSON_UINT64_C2(0xe2a0b5dc, 0x971f303a), + RAPIDJSON_UINT64_C2(0xa8d9d153, 0x5ce3b396), RAPIDJSON_UINT64_C2(0xfb9b7cd9, 0xa4a7443c), + RAPIDJSON_UINT64_C2(0xbb764c4c, 0xa7a44410), RAPIDJSON_UINT64_C2(0x8bab8eef, 0xb6409c1a), + RAPIDJSON_UINT64_C2(0xd01fef10, 0xa657842c), RAPIDJSON_UINT64_C2(0x9b10a4e5, 0xe9913129), + RAPIDJSON_UINT64_C2(0xe7109bfb, 0xa19c0c9d), RAPIDJSON_UINT64_C2(0xac2820d9, 0x623bf429), + RAPIDJSON_UINT64_C2(0x80444b5e, 0x7aa7cf85), RAPIDJSON_UINT64_C2(0xbf21e440, 0x03acdd2d), + RAPIDJSON_UINT64_C2(0x8e679c2f, 0x5e44ff8f), RAPIDJSON_UINT64_C2(0xd433179d, 0x9c8cb841), + RAPIDJSON_UINT64_C2(0x9e19db92, 0xb4e31ba9), RAPIDJSON_UINT64_C2(0xeb96bf6e, 0xbadf77d9), + RAPIDJSON_UINT64_C2(0xaf87023b, 0x9bf0ee6b) + }; + static const int16_t kCachedPowers_E[] = { + -1220, -1193, -1166, -1140, -1113, -1087, -1060, -1034, -1007, -980, + -954, -927, -901, -874, -847, -821, -794, -768, -741, -715, + -688, -661, -635, -608, -582, -555, -529, -502, -475, -449, + -422, -396, -369, -343, -316, -289, -263, -236, -210, -183, + -157, -130, -103, -77, -50, -24, 3, 30, 56, 83, + 109, 136, 162, 189, 216, 242, 269, 295, 322, 348, + 375, 402, 428, 455, 481, 508, 534, 561, 588, 614, + 641, 667, 694, 720, 747, 774, 800, 827, 853, 880, + 907, 933, 960, 986, 1013, 1039, 1066 + }; + return DiyFp(kCachedPowers_F[index], kCachedPowers_E[index]); +} + +inline DiyFp GetCachedPower(int e, int* K) { + + //int k = static_cast(ceil((-61 - e) * 0.30102999566398114)) + 374; + double dk = (-61 - e) * 0.30102999566398114 + 347; // dk must be positive, so can do ceiling in positive + int k = static_cast(dk); + if (dk - k > 0.0) + k++; + + unsigned index = static_cast((k >> 3) + 1); + *K = -(-348 + static_cast(index << 3)); // decimal exponent no need lookup table + + return GetCachedPowerByIndex(index); +} + +inline DiyFp GetCachedPower10(int exp, int *outExp) { + unsigned index = (static_cast(exp) + 348u) / 8u; + *outExp = -348 + static_cast(index) * 8; + return GetCachedPowerByIndex(index); + } + +#ifdef __GNUC__ +RAPIDJSON_DIAG_POP +#endif + +#ifdef __clang__ +RAPIDJSON_DIAG_POP +RAPIDJSON_DIAG_OFF(padded) +#endif + +} // namespace internal +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_DIYFP_H_ diff --git a/include/rapidjson/internal/dtoa.h b/include/rapidjson/internal/dtoa.h new file mode 100644 index 0000000..8d6350e --- /dev/null +++ b/include/rapidjson/internal/dtoa.h @@ -0,0 +1,245 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. 
You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +// This is a C++ header-only implementation of Grisu2 algorithm from the publication: +// Loitsch, Florian. "Printing floating-point numbers quickly and accurately with +// integers." ACM Sigplan Notices 45.6 (2010): 233-243. + +#ifndef RAPIDJSON_DTOA_ +#define RAPIDJSON_DTOA_ + +#include "itoa.h" // GetDigitsLut() +#include "diyfp.h" +#include "ieee754.h" + +RAPIDJSON_NAMESPACE_BEGIN +namespace internal { + +#ifdef __GNUC__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(effc++) +RAPIDJSON_DIAG_OFF(array-bounds) // some gcc versions generate wrong warnings https://gcc.gnu.org/bugzilla/show_bug.cgi?id=59124 +#endif + +inline void GrisuRound(char* buffer, int len, uint64_t delta, uint64_t rest, uint64_t ten_kappa, uint64_t wp_w) { + while (rest < wp_w && delta - rest >= ten_kappa && + (rest + ten_kappa < wp_w || /// closer + wp_w - rest > rest + ten_kappa - wp_w)) { + buffer[len - 1]--; + rest += ten_kappa; + } +} + +inline unsigned CountDecimalDigit32(uint32_t n) { + // Simple pure C++ implementation was faster than __builtin_clz version in this situation. + if (n < 10) return 1; + if (n < 100) return 2; + if (n < 1000) return 3; + if (n < 10000) return 4; + if (n < 100000) return 5; + if (n < 1000000) return 6; + if (n < 10000000) return 7; + if (n < 100000000) return 8; + // Will not reach 10 digits in DigitGen() + //if (n < 1000000000) return 9; + //return 10; + return 9; +} + +inline void DigitGen(const DiyFp& W, const DiyFp& Mp, uint64_t delta, char* buffer, int* len, int* K) { + static const uint32_t kPow10[] = { 1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000 }; + const DiyFp one(uint64_t(1) << -Mp.e, Mp.e); + const DiyFp wp_w = Mp - W; + uint32_t p1 = static_cast(Mp.f >> -one.e); + uint64_t p2 = Mp.f & (one.f - 1); + unsigned kappa = CountDecimalDigit32(p1); // kappa in [0, 9] + *len = 0; + + while (kappa > 0) { + uint32_t d = 0; + switch (kappa) { + case 9: d = p1 / 100000000; p1 %= 100000000; break; + case 8: d = p1 / 10000000; p1 %= 10000000; break; + case 7: d = p1 / 1000000; p1 %= 1000000; break; + case 6: d = p1 / 100000; p1 %= 100000; break; + case 5: d = p1 / 10000; p1 %= 10000; break; + case 4: d = p1 / 1000; p1 %= 1000; break; + case 3: d = p1 / 100; p1 %= 100; break; + case 2: d = p1 / 10; p1 %= 10; break; + case 1: d = p1; p1 = 0; break; + default:; + } + if (d || *len) + buffer[(*len)++] = static_cast('0' + static_cast(d)); + kappa--; + uint64_t tmp = (static_cast(p1) << -one.e) + p2; + if (tmp <= delta) { + *K += kappa; + GrisuRound(buffer, *len, delta, tmp, static_cast(kPow10[kappa]) << -one.e, wp_w.f); + return; + } + } + + // kappa = 0 + for (;;) { + p2 *= 10; + delta *= 10; + char d = static_cast(p2 >> -one.e); + if (d || *len) + buffer[(*len)++] = static_cast('0' + d); + p2 &= one.f - 1; + kappa--; + if (p2 < delta) { + *K += kappa; + int index = -static_cast(kappa); + GrisuRound(buffer, *len, delta, p2, one.f, wp_w.f * (index < 9 ? 
kPow10[-static_cast(kappa)] : 0)); + return; + } + } +} + +inline void Grisu2(double value, char* buffer, int* length, int* K) { + const DiyFp v(value); + DiyFp w_m, w_p; + v.NormalizedBoundaries(&w_m, &w_p); + + const DiyFp c_mk = GetCachedPower(w_p.e, K); + const DiyFp W = v.Normalize() * c_mk; + DiyFp Wp = w_p * c_mk; + DiyFp Wm = w_m * c_mk; + Wm.f++; + Wp.f--; + DigitGen(W, Wp, Wp.f - Wm.f, buffer, length, K); +} + +inline char* WriteExponent(int K, char* buffer) { + if (K < 0) { + *buffer++ = '-'; + K = -K; + } + + if (K >= 100) { + *buffer++ = static_cast('0' + static_cast(K / 100)); + K %= 100; + const char* d = GetDigitsLut() + K * 2; + *buffer++ = d[0]; + *buffer++ = d[1]; + } + else if (K >= 10) { + const char* d = GetDigitsLut() + K * 2; + *buffer++ = d[0]; + *buffer++ = d[1]; + } + else + *buffer++ = static_cast('0' + static_cast(K)); + + return buffer; +} + +inline char* Prettify(char* buffer, int length, int k, int maxDecimalPlaces) { + const int kk = length + k; // 10^(kk-1) <= v < 10^kk + + if (0 <= k && kk <= 21) { + // 1234e7 -> 12340000000 + for (int i = length; i < kk; i++) + buffer[i] = '0'; + buffer[kk] = '.'; + buffer[kk + 1] = '0'; + return &buffer[kk + 2]; + } + else if (0 < kk && kk <= 21) { + // 1234e-2 -> 12.34 + std::memmove(&buffer[kk + 1], &buffer[kk], static_cast(length - kk)); + buffer[kk] = '.'; + if (0 > k + maxDecimalPlaces) { + // When maxDecimalPlaces = 2, 1.2345 -> 1.23, 1.102 -> 1.1 + // Remove extra trailing zeros (at least one) after truncation. + for (int i = kk + maxDecimalPlaces; i > kk + 1; i--) + if (buffer[i] != '0') + return &buffer[i + 1]; + return &buffer[kk + 2]; // Reserve one zero + } + else + return &buffer[length + 1]; + } + else if (-6 < kk && kk <= 0) { + // 1234e-6 -> 0.001234 + const int offset = 2 - kk; + std::memmove(&buffer[offset], &buffer[0], static_cast(length)); + buffer[0] = '0'; + buffer[1] = '.'; + for (int i = 2; i < offset; i++) + buffer[i] = '0'; + if (length - kk > maxDecimalPlaces) { + // When maxDecimalPlaces = 2, 0.123 -> 0.12, 0.102 -> 0.1 + // Remove extra trailing zeros (at least one) after truncation. 
+ for (int i = maxDecimalPlaces + 1; i > 2; i--) + if (buffer[i] != '0') + return &buffer[i + 1]; + return &buffer[3]; // Reserve one zero + } + else + return &buffer[length + offset]; + } + else if (kk < -maxDecimalPlaces) { + // Truncate to zero + buffer[0] = '0'; + buffer[1] = '.'; + buffer[2] = '0'; + return &buffer[3]; + } + else if (length == 1) { + // 1e30 + buffer[1] = 'e'; + return WriteExponent(kk - 1, &buffer[2]); + } + else { + // 1234e30 -> 1.234e33 + std::memmove(&buffer[2], &buffer[1], static_cast(length - 1)); + buffer[1] = '.'; + buffer[length + 1] = 'e'; + return WriteExponent(kk - 1, &buffer[0 + length + 2]); + } +} + +inline char* dtoa(double value, char* buffer, int maxDecimalPlaces = 324) { + RAPIDJSON_ASSERT(maxDecimalPlaces >= 1); + Double d(value); + if (d.IsZero()) { + if (d.Sign()) + *buffer++ = '-'; // -0.0, Issue #289 + buffer[0] = '0'; + buffer[1] = '.'; + buffer[2] = '0'; + return &buffer[3]; + } + else { + if (value < 0) { + *buffer++ = '-'; + value = -value; + } + int length, K; + Grisu2(value, buffer, &length, &K); + return Prettify(buffer, length, K, maxDecimalPlaces); + } +} + +#ifdef __GNUC__ +RAPIDJSON_DIAG_POP +#endif + +} // namespace internal +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_DTOA_ diff --git a/include/rapidjson/internal/ieee754.h b/include/rapidjson/internal/ieee754.h new file mode 100644 index 0000000..82bb0b9 --- /dev/null +++ b/include/rapidjson/internal/ieee754.h @@ -0,0 +1,78 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_IEEE754_ +#define RAPIDJSON_IEEE754_ + +#include "../rapidjson.h" + +RAPIDJSON_NAMESPACE_BEGIN +namespace internal { + +class Double { +public: + Double() {} + Double(double d) : d_(d) {} + Double(uint64_t u) : u_(u) {} + + double Value() const { return d_; } + uint64_t Uint64Value() const { return u_; } + + double NextPositiveDouble() const { + RAPIDJSON_ASSERT(!Sign()); + return Double(u_ + 1).Value(); + } + + bool Sign() const { return (u_ & kSignMask) != 0; } + uint64_t Significand() const { return u_ & kSignificandMask; } + int Exponent() const { return static_cast(((u_ & kExponentMask) >> kSignificandSize) - kExponentBias); } + + bool IsNan() const { return (u_ & kExponentMask) == kExponentMask && Significand() != 0; } + bool IsInf() const { return (u_ & kExponentMask) == kExponentMask && Significand() == 0; } + bool IsNanOrInf() const { return (u_ & kExponentMask) == kExponentMask; } + bool IsNormal() const { return (u_ & kExponentMask) != 0 || Significand() == 0; } + bool IsZero() const { return (u_ & (kExponentMask | kSignificandMask)) == 0; } + + uint64_t IntegerSignificand() const { return IsNormal() ? Significand() | kHiddenBit : Significand(); } + int IntegerExponent() const { return (IsNormal() ? Exponent() : kDenormalExponent) - kSignificandSize; } + uint64_t ToBias() const { return (u_ & kSignMask) ? 
~u_ + 1 : u_ | kSignMask; } + + static unsigned EffectiveSignificandSize(int order) { + if (order >= -1021) + return 53; + else if (order <= -1074) + return 0; + else + return static_cast(order) + 1074; + } + +private: + static const int kSignificandSize = 52; + static const int kExponentBias = 0x3FF; + static const int kDenormalExponent = 1 - kExponentBias; + static const uint64_t kSignMask = RAPIDJSON_UINT64_C2(0x80000000, 0x00000000); + static const uint64_t kExponentMask = RAPIDJSON_UINT64_C2(0x7FF00000, 0x00000000); + static const uint64_t kSignificandMask = RAPIDJSON_UINT64_C2(0x000FFFFF, 0xFFFFFFFF); + static const uint64_t kHiddenBit = RAPIDJSON_UINT64_C2(0x00100000, 0x00000000); + + union { + double d_; + uint64_t u_; + }; +}; + +} // namespace internal +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_IEEE754_ diff --git a/include/rapidjson/internal/itoa.h b/include/rapidjson/internal/itoa.h new file mode 100644 index 0000000..01a4e7e --- /dev/null +++ b/include/rapidjson/internal/itoa.h @@ -0,0 +1,304 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_ITOA_ +#define RAPIDJSON_ITOA_ + +#include "../rapidjson.h" + +RAPIDJSON_NAMESPACE_BEGIN +namespace internal { + +inline const char* GetDigitsLut() { + static const char cDigitsLut[200] = { + '0','0','0','1','0','2','0','3','0','4','0','5','0','6','0','7','0','8','0','9', + '1','0','1','1','1','2','1','3','1','4','1','5','1','6','1','7','1','8','1','9', + '2','0','2','1','2','2','2','3','2','4','2','5','2','6','2','7','2','8','2','9', + '3','0','3','1','3','2','3','3','3','4','3','5','3','6','3','7','3','8','3','9', + '4','0','4','1','4','2','4','3','4','4','4','5','4','6','4','7','4','8','4','9', + '5','0','5','1','5','2','5','3','5','4','5','5','5','6','5','7','5','8','5','9', + '6','0','6','1','6','2','6','3','6','4','6','5','6','6','6','7','6','8','6','9', + '7','0','7','1','7','2','7','3','7','4','7','5','7','6','7','7','7','8','7','9', + '8','0','8','1','8','2','8','3','8','4','8','5','8','6','8','7','8','8','8','9', + '9','0','9','1','9','2','9','3','9','4','9','5','9','6','9','7','9','8','9','9' + }; + return cDigitsLut; +} + +inline char* u32toa(uint32_t value, char* buffer) { + const char* cDigitsLut = GetDigitsLut(); + + if (value < 10000) { + const uint32_t d1 = (value / 100) << 1; + const uint32_t d2 = (value % 100) << 1; + + if (value >= 1000) + *buffer++ = cDigitsLut[d1]; + if (value >= 100) + *buffer++ = cDigitsLut[d1 + 1]; + if (value >= 10) + *buffer++ = cDigitsLut[d2]; + *buffer++ = cDigitsLut[d2 + 1]; + } + else if (value < 100000000) { + // value = bbbbcccc + const uint32_t b = value / 10000; + const uint32_t c = value % 10000; + + const uint32_t d1 = (b / 100) << 1; + const uint32_t d2 = (b % 100) << 1; + + const uint32_t d3 = (c / 100) << 1; + const uint32_t d4 = (c % 100) << 1; + + if (value >= 10000000) + *buffer++ = 
cDigitsLut[d1]; + if (value >= 1000000) + *buffer++ = cDigitsLut[d1 + 1]; + if (value >= 100000) + *buffer++ = cDigitsLut[d2]; + *buffer++ = cDigitsLut[d2 + 1]; + + *buffer++ = cDigitsLut[d3]; + *buffer++ = cDigitsLut[d3 + 1]; + *buffer++ = cDigitsLut[d4]; + *buffer++ = cDigitsLut[d4 + 1]; + } + else { + // value = aabbbbcccc in decimal + + const uint32_t a = value / 100000000; // 1 to 42 + value %= 100000000; + + if (a >= 10) { + const unsigned i = a << 1; + *buffer++ = cDigitsLut[i]; + *buffer++ = cDigitsLut[i + 1]; + } + else + *buffer++ = static_cast('0' + static_cast(a)); + + const uint32_t b = value / 10000; // 0 to 9999 + const uint32_t c = value % 10000; // 0 to 9999 + + const uint32_t d1 = (b / 100) << 1; + const uint32_t d2 = (b % 100) << 1; + + const uint32_t d3 = (c / 100) << 1; + const uint32_t d4 = (c % 100) << 1; + + *buffer++ = cDigitsLut[d1]; + *buffer++ = cDigitsLut[d1 + 1]; + *buffer++ = cDigitsLut[d2]; + *buffer++ = cDigitsLut[d2 + 1]; + *buffer++ = cDigitsLut[d3]; + *buffer++ = cDigitsLut[d3 + 1]; + *buffer++ = cDigitsLut[d4]; + *buffer++ = cDigitsLut[d4 + 1]; + } + return buffer; +} + +inline char* i32toa(int32_t value, char* buffer) { + uint32_t u = static_cast(value); + if (value < 0) { + *buffer++ = '-'; + u = ~u + 1; + } + + return u32toa(u, buffer); +} + +inline char* u64toa(uint64_t value, char* buffer) { + const char* cDigitsLut = GetDigitsLut(); + const uint64_t kTen8 = 100000000; + const uint64_t kTen9 = kTen8 * 10; + const uint64_t kTen10 = kTen8 * 100; + const uint64_t kTen11 = kTen8 * 1000; + const uint64_t kTen12 = kTen8 * 10000; + const uint64_t kTen13 = kTen8 * 100000; + const uint64_t kTen14 = kTen8 * 1000000; + const uint64_t kTen15 = kTen8 * 10000000; + const uint64_t kTen16 = kTen8 * kTen8; + + if (value < kTen8) { + uint32_t v = static_cast(value); + if (v < 10000) { + const uint32_t d1 = (v / 100) << 1; + const uint32_t d2 = (v % 100) << 1; + + if (v >= 1000) + *buffer++ = cDigitsLut[d1]; + if (v >= 100) + *buffer++ = cDigitsLut[d1 + 1]; + if (v >= 10) + *buffer++ = cDigitsLut[d2]; + *buffer++ = cDigitsLut[d2 + 1]; + } + else { + // value = bbbbcccc + const uint32_t b = v / 10000; + const uint32_t c = v % 10000; + + const uint32_t d1 = (b / 100) << 1; + const uint32_t d2 = (b % 100) << 1; + + const uint32_t d3 = (c / 100) << 1; + const uint32_t d4 = (c % 100) << 1; + + if (value >= 10000000) + *buffer++ = cDigitsLut[d1]; + if (value >= 1000000) + *buffer++ = cDigitsLut[d1 + 1]; + if (value >= 100000) + *buffer++ = cDigitsLut[d2]; + *buffer++ = cDigitsLut[d2 + 1]; + + *buffer++ = cDigitsLut[d3]; + *buffer++ = cDigitsLut[d3 + 1]; + *buffer++ = cDigitsLut[d4]; + *buffer++ = cDigitsLut[d4 + 1]; + } + } + else if (value < kTen16) { + const uint32_t v0 = static_cast(value / kTen8); + const uint32_t v1 = static_cast(value % kTen8); + + const uint32_t b0 = v0 / 10000; + const uint32_t c0 = v0 % 10000; + + const uint32_t d1 = (b0 / 100) << 1; + const uint32_t d2 = (b0 % 100) << 1; + + const uint32_t d3 = (c0 / 100) << 1; + const uint32_t d4 = (c0 % 100) << 1; + + const uint32_t b1 = v1 / 10000; + const uint32_t c1 = v1 % 10000; + + const uint32_t d5 = (b1 / 100) << 1; + const uint32_t d6 = (b1 % 100) << 1; + + const uint32_t d7 = (c1 / 100) << 1; + const uint32_t d8 = (c1 % 100) << 1; + + if (value >= kTen15) + *buffer++ = cDigitsLut[d1]; + if (value >= kTen14) + *buffer++ = cDigitsLut[d1 + 1]; + if (value >= kTen13) + *buffer++ = cDigitsLut[d2]; + if (value >= kTen12) + *buffer++ = cDigitsLut[d2 + 1]; + if (value >= kTen11) + *buffer++ = cDigitsLut[d3]; + 
if (value >= kTen10) + *buffer++ = cDigitsLut[d3 + 1]; + if (value >= kTen9) + *buffer++ = cDigitsLut[d4]; + if (value >= kTen8) + *buffer++ = cDigitsLut[d4 + 1]; + + *buffer++ = cDigitsLut[d5]; + *buffer++ = cDigitsLut[d5 + 1]; + *buffer++ = cDigitsLut[d6]; + *buffer++ = cDigitsLut[d6 + 1]; + *buffer++ = cDigitsLut[d7]; + *buffer++ = cDigitsLut[d7 + 1]; + *buffer++ = cDigitsLut[d8]; + *buffer++ = cDigitsLut[d8 + 1]; + } + else { + const uint32_t a = static_cast(value / kTen16); // 1 to 1844 + value %= kTen16; + + if (a < 10) + *buffer++ = static_cast('0' + static_cast(a)); + else if (a < 100) { + const uint32_t i = a << 1; + *buffer++ = cDigitsLut[i]; + *buffer++ = cDigitsLut[i + 1]; + } + else if (a < 1000) { + *buffer++ = static_cast('0' + static_cast(a / 100)); + + const uint32_t i = (a % 100) << 1; + *buffer++ = cDigitsLut[i]; + *buffer++ = cDigitsLut[i + 1]; + } + else { + const uint32_t i = (a / 100) << 1; + const uint32_t j = (a % 100) << 1; + *buffer++ = cDigitsLut[i]; + *buffer++ = cDigitsLut[i + 1]; + *buffer++ = cDigitsLut[j]; + *buffer++ = cDigitsLut[j + 1]; + } + + const uint32_t v0 = static_cast(value / kTen8); + const uint32_t v1 = static_cast(value % kTen8); + + const uint32_t b0 = v0 / 10000; + const uint32_t c0 = v0 % 10000; + + const uint32_t d1 = (b0 / 100) << 1; + const uint32_t d2 = (b0 % 100) << 1; + + const uint32_t d3 = (c0 / 100) << 1; + const uint32_t d4 = (c0 % 100) << 1; + + const uint32_t b1 = v1 / 10000; + const uint32_t c1 = v1 % 10000; + + const uint32_t d5 = (b1 / 100) << 1; + const uint32_t d6 = (b1 % 100) << 1; + + const uint32_t d7 = (c1 / 100) << 1; + const uint32_t d8 = (c1 % 100) << 1; + + *buffer++ = cDigitsLut[d1]; + *buffer++ = cDigitsLut[d1 + 1]; + *buffer++ = cDigitsLut[d2]; + *buffer++ = cDigitsLut[d2 + 1]; + *buffer++ = cDigitsLut[d3]; + *buffer++ = cDigitsLut[d3 + 1]; + *buffer++ = cDigitsLut[d4]; + *buffer++ = cDigitsLut[d4 + 1]; + *buffer++ = cDigitsLut[d5]; + *buffer++ = cDigitsLut[d5 + 1]; + *buffer++ = cDigitsLut[d6]; + *buffer++ = cDigitsLut[d6 + 1]; + *buffer++ = cDigitsLut[d7]; + *buffer++ = cDigitsLut[d7 + 1]; + *buffer++ = cDigitsLut[d8]; + *buffer++ = cDigitsLut[d8 + 1]; + } + + return buffer; +} + +inline char* i64toa(int64_t value, char* buffer) { + uint64_t u = static_cast(value); + if (value < 0) { + *buffer++ = '-'; + u = ~u + 1; + } + + return u64toa(u, buffer); +} + +} // namespace internal +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_ITOA_ diff --git a/include/rapidjson/internal/meta.h b/include/rapidjson/internal/meta.h new file mode 100644 index 0000000..5a9aaa4 --- /dev/null +++ b/include/rapidjson/internal/meta.h @@ -0,0 +1,181 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. 
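A minimal usage sketch for the itoa.h writers above (not part of the patch; the include path assumes the library's include/ directory is on the compiler search path). The convention visible in the code is that u32toa/u64toa/i32toa/i64toa return a pointer one past the last digit and never write a terminating NUL, so the caller terminates the buffer itself:

    #include <cstdio>
    #include "rapidjson/internal/itoa.h"

    int main() {
        char buf[21];   // 20 digits for the largest uint64_t, plus the terminator we add
        char* end = rapidjson::internal::u64toa(18446744073709551615ULL, buf);
        *end = '\0';    // the writers return the end pointer; they do not NUL-terminate
        std::printf("%s\n", buf);
        return 0;
    }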
+
+#ifndef RAPIDJSON_INTERNAL_META_H_
+#define RAPIDJSON_INTERNAL_META_H_
+
+#include "../rapidjson.h"
+
+#ifdef __GNUC__
+RAPIDJSON_DIAG_PUSH
+RAPIDJSON_DIAG_OFF(effc++)
+#endif
+#if defined(_MSC_VER)
+RAPIDJSON_DIAG_PUSH
+RAPIDJSON_DIAG_OFF(6334)
+#endif
+
+#if RAPIDJSON_HAS_CXX11_TYPETRAITS
+#include <type_traits>
+#endif
+
+//@cond RAPIDJSON_INTERNAL
+RAPIDJSON_NAMESPACE_BEGIN
+namespace internal {
+
+// Helper to wrap/convert arbitrary types to void, useful for arbitrary type matching
+template <typename T> struct Void { typedef void Type; };
+
+///////////////////////////////////////////////////////////////////////////////
+// BoolType, TrueType, FalseType
+//
+template <bool Cond> struct BoolType {
+    static const bool Value = Cond;
+    typedef BoolType Type;
+};
+typedef BoolType<true> TrueType;
+typedef BoolType<false> FalseType;
+
+
+///////////////////////////////////////////////////////////////////////////////
+// SelectIf, BoolExpr, NotExpr, AndExpr, OrExpr
+//
+
+template <bool C> struct SelectIfImpl { template <typename T1, typename T2> struct Apply { typedef T1 Type; }; };
+template <> struct SelectIfImpl<false> { template <typename T1, typename T2> struct Apply { typedef T2 Type; }; };
+template <bool C, typename T1, typename T2> struct SelectIfCond : SelectIfImpl<C>::template Apply<T1, T2> {};
+template <typename C, typename T1, typename T2> struct SelectIf : SelectIfCond<C::Value, T1, T2> {};
+
+template <bool Cond1, bool Cond2> struct AndExprCond : FalseType {};
+template <> struct AndExprCond<true, true> : TrueType {};
+template <bool Cond1, bool Cond2> struct OrExprCond : TrueType {};
+template <> struct OrExprCond<false, false> : FalseType {};
+
+template <typename C> struct BoolExpr : SelectIf<C, TrueType, FalseType>::Type {};
+template <typename C> struct NotExpr : SelectIf<C, FalseType, TrueType>::Type {};
+template <typename C1, typename C2> struct AndExpr : AndExprCond<C1::Value, C2::Value>::Type {};
+template <typename C1, typename C2> struct OrExpr : OrExprCond<C1::Value, C2::Value>::Type {};
+
+
+///////////////////////////////////////////////////////////////////////////////
+// AddConst, MaybeAddConst, RemoveConst
+template <typename T> struct AddConst { typedef const T Type; };
+template <bool Constify, typename T> struct MaybeAddConst : SelectIfCond<Constify, const T, T> {};
+template <typename T> struct RemoveConst { typedef T Type; };
+template <typename T> struct RemoveConst<const T> { typedef T Type; };
+
+
+///////////////////////////////////////////////////////////////////////////////
+// IsSame, IsConst, IsMoreConst, IsPointer
+//
+template <typename T, typename U> struct IsSame : FalseType {};
+template <typename T> struct IsSame<T, T> : TrueType {};
+
+template <typename T> struct IsConst : FalseType {};
+template <typename T> struct IsConst<const T> : TrueType {};
+
+template <typename CT, typename T>
+struct IsMoreConst
+    : AndExpr<IsSame<typename RemoveConst<CT>::Type, typename RemoveConst<T>::Type>,
+              BoolType<IsConst<CT>::Value >= IsConst<T>::Value> >::Type {};
+
+template <typename T> struct IsPointer : FalseType {};
+template <typename T> struct IsPointer<T*> : TrueType {};
+
+///////////////////////////////////////////////////////////////////////////////
+// IsBaseOf
+//
+#if RAPIDJSON_HAS_CXX11_TYPETRAITS
+
+template <typename B, typename D> struct IsBaseOf
+    : BoolType< ::std::is_base_of<B, D>::value> {};
+
+#else // simplified version adopted from Boost
+
+template <typename B, typename D> struct IsBaseOfImpl {
+    RAPIDJSON_STATIC_ASSERT(sizeof(B) != 0);
+    RAPIDJSON_STATIC_ASSERT(sizeof(D) != 0);
+
+    typedef char (&Yes)[1];
+    typedef char (&No) [2];
+
+    template <typename T>
+    static Yes Check(const D*, T);
+    static No Check(const B*, int);
+
+    struct Host {
+        operator const B*() const;
+        operator const D*();
+    };
+
+    enum { Value = (sizeof(Check(Host(), 0)) == sizeof(Yes)) };
+};
+
+template <typename B, typename D> struct IsBaseOf
+    : OrExpr<IsSame<B, D>, BoolExpr<IsBaseOfImpl<B, D> > >::Type {};
+
+#endif // RAPIDJSON_HAS_CXX11_TYPETRAITS
+
+
+//////////////////////////////////////////////////////////////////////////
+// EnableIf / DisableIf
+//
+template <bool Condition, typename T = void> struct EnableIfCond { typedef T Type; };
+template <typename T> struct EnableIfCond<false, T> { /* empty */ };
+
+template <bool Condition, typename T = void> struct DisableIfCond { typedef T Type; };
+template <typename T> struct DisableIfCond<true, T> { /* empty */ };
+
+template <typename Condition, typename T = void>
+struct EnableIf : EnableIfCond<Condition::Value, T> {};
+
+template <typename Condition, typename T = void>
+struct DisableIf : DisableIfCond<Condition::Value, T> {};
+
+// SFINAE helpers
+struct SfinaeTag {};
+template <typename T> struct RemoveSfinaeTag;
+template <typename T> struct RemoveSfinaeTag<SfinaeTag&(*)(T)> { typedef T Type; };
+
+#define RAPIDJSON_REMOVEFPTR_(type) \
+    typename ::RAPIDJSON_NAMESPACE::internal::RemoveSfinaeTag \
+        < ::RAPIDJSON_NAMESPACE::internal::SfinaeTag&(*) type>::Type
+
+#define RAPIDJSON_ENABLEIF(cond) \
+    typename ::RAPIDJSON_NAMESPACE::internal::EnableIf \
+        <RAPIDJSON_REMOVEFPTR_(cond)>::Type * = NULL
+
+#define RAPIDJSON_DISABLEIF(cond) \
+    typename ::RAPIDJSON_NAMESPACE::internal::DisableIf \
+        <RAPIDJSON_REMOVEFPTR_(cond)>::Type * = NULL
+
+#define RAPIDJSON_ENABLEIF_RETURN(cond,returntype) \
+    typename ::RAPIDJSON_NAMESPACE::internal::EnableIf \
+        <RAPIDJSON_REMOVEFPTR_(cond), RAPIDJSON_REMOVEFPTR_(returntype)>::Type
+
+#define RAPIDJSON_DISABLEIF_RETURN(cond,returntype) \
+    typename ::RAPIDJSON_NAMESPACE::internal::DisableIf \
+        <RAPIDJSON_REMOVEFPTR_(cond), RAPIDJSON_REMOVEFPTR_(returntype)>::Type
+
+} // namespace internal
+RAPIDJSON_NAMESPACE_END
+//@endcond
+
+#if defined(__GNUC__) || defined(_MSC_VER)
+RAPIDJSON_DIAG_POP
+#endif
+
+#endif // RAPIDJSON_INTERNAL_META_H_
diff --git a/include/rapidjson/internal/pow10.h b/include/rapidjson/internal/pow10.h
new file mode 100644
index 0000000..02f475d
--- /dev/null
+++ b/include/rapidjson/internal/pow10.h
@@ -0,0 +1,55 @@
+// Tencent is pleased to support the open source community by making RapidJSON available.
+//
+// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved.
+//
+// Licensed under the MIT License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless required by applicable law or agreed to in writing, software distributed
+// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+// CONDITIONS OF ANY KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations under the License.
+
+#ifndef RAPIDJSON_POW10_
+#define RAPIDJSON_POW10_
+
+#include "../rapidjson.h"
+
+RAPIDJSON_NAMESPACE_BEGIN
+namespace internal {
+
+//! Computes integer powers of 10 in double (10.0^n).
+/*! This function uses lookup table for fast and accurate results.
+    \param n non-negative exponent. Must <= 308.
+ \return 10.0^n +*/ +inline double Pow10(int n) { + static const double e[] = { // 1e-0...1e308: 309 * 8 bytes = 2472 bytes + 1e+0, + 1e+1, 1e+2, 1e+3, 1e+4, 1e+5, 1e+6, 1e+7, 1e+8, 1e+9, 1e+10, 1e+11, 1e+12, 1e+13, 1e+14, 1e+15, 1e+16, 1e+17, 1e+18, 1e+19, 1e+20, + 1e+21, 1e+22, 1e+23, 1e+24, 1e+25, 1e+26, 1e+27, 1e+28, 1e+29, 1e+30, 1e+31, 1e+32, 1e+33, 1e+34, 1e+35, 1e+36, 1e+37, 1e+38, 1e+39, 1e+40, + 1e+41, 1e+42, 1e+43, 1e+44, 1e+45, 1e+46, 1e+47, 1e+48, 1e+49, 1e+50, 1e+51, 1e+52, 1e+53, 1e+54, 1e+55, 1e+56, 1e+57, 1e+58, 1e+59, 1e+60, + 1e+61, 1e+62, 1e+63, 1e+64, 1e+65, 1e+66, 1e+67, 1e+68, 1e+69, 1e+70, 1e+71, 1e+72, 1e+73, 1e+74, 1e+75, 1e+76, 1e+77, 1e+78, 1e+79, 1e+80, + 1e+81, 1e+82, 1e+83, 1e+84, 1e+85, 1e+86, 1e+87, 1e+88, 1e+89, 1e+90, 1e+91, 1e+92, 1e+93, 1e+94, 1e+95, 1e+96, 1e+97, 1e+98, 1e+99, 1e+100, + 1e+101,1e+102,1e+103,1e+104,1e+105,1e+106,1e+107,1e+108,1e+109,1e+110,1e+111,1e+112,1e+113,1e+114,1e+115,1e+116,1e+117,1e+118,1e+119,1e+120, + 1e+121,1e+122,1e+123,1e+124,1e+125,1e+126,1e+127,1e+128,1e+129,1e+130,1e+131,1e+132,1e+133,1e+134,1e+135,1e+136,1e+137,1e+138,1e+139,1e+140, + 1e+141,1e+142,1e+143,1e+144,1e+145,1e+146,1e+147,1e+148,1e+149,1e+150,1e+151,1e+152,1e+153,1e+154,1e+155,1e+156,1e+157,1e+158,1e+159,1e+160, + 1e+161,1e+162,1e+163,1e+164,1e+165,1e+166,1e+167,1e+168,1e+169,1e+170,1e+171,1e+172,1e+173,1e+174,1e+175,1e+176,1e+177,1e+178,1e+179,1e+180, + 1e+181,1e+182,1e+183,1e+184,1e+185,1e+186,1e+187,1e+188,1e+189,1e+190,1e+191,1e+192,1e+193,1e+194,1e+195,1e+196,1e+197,1e+198,1e+199,1e+200, + 1e+201,1e+202,1e+203,1e+204,1e+205,1e+206,1e+207,1e+208,1e+209,1e+210,1e+211,1e+212,1e+213,1e+214,1e+215,1e+216,1e+217,1e+218,1e+219,1e+220, + 1e+221,1e+222,1e+223,1e+224,1e+225,1e+226,1e+227,1e+228,1e+229,1e+230,1e+231,1e+232,1e+233,1e+234,1e+235,1e+236,1e+237,1e+238,1e+239,1e+240, + 1e+241,1e+242,1e+243,1e+244,1e+245,1e+246,1e+247,1e+248,1e+249,1e+250,1e+251,1e+252,1e+253,1e+254,1e+255,1e+256,1e+257,1e+258,1e+259,1e+260, + 1e+261,1e+262,1e+263,1e+264,1e+265,1e+266,1e+267,1e+268,1e+269,1e+270,1e+271,1e+272,1e+273,1e+274,1e+275,1e+276,1e+277,1e+278,1e+279,1e+280, + 1e+281,1e+282,1e+283,1e+284,1e+285,1e+286,1e+287,1e+288,1e+289,1e+290,1e+291,1e+292,1e+293,1e+294,1e+295,1e+296,1e+297,1e+298,1e+299,1e+300, + 1e+301,1e+302,1e+303,1e+304,1e+305,1e+306,1e+307,1e+308 + }; + RAPIDJSON_ASSERT(n >= 0 && n <= 308); + return e[n]; +} + +} // namespace internal +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_POW10_ diff --git a/include/rapidjson/internal/regex.h b/include/rapidjson/internal/regex.h new file mode 100644 index 0000000..422a524 --- /dev/null +++ b/include/rapidjson/internal/regex.h @@ -0,0 +1,701 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. 
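A quick sketch exercising the pow10.h lookup just shown (not part of the patch; including the internal header directly is for illustration only). Pow10(n) is a straight table read, and the RAPIDJSON_ASSERT restricts n to 0..308:

    #include <cstdio>
    #include "rapidjson/internal/pow10.h"

    int main() {
        using rapidjson::internal::Pow10;
        std::printf("%g %g %g\n", Pow10(0), Pow10(17), Pow10(308));   // 1 1e+17 1e+308
        // Pow10(-1) or Pow10(309) would trip RAPIDJSON_ASSERT(n >= 0 && n <= 308).
        return 0;
    }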
+ +#ifndef RAPIDJSON_INTERNAL_REGEX_H_ +#define RAPIDJSON_INTERNAL_REGEX_H_ + +#include "../allocators.h" +#include "../stream.h" +#include "stack.h" + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(padded) +RAPIDJSON_DIAG_OFF(switch-enum) +RAPIDJSON_DIAG_OFF(implicit-fallthrough) +#endif + +#ifdef __GNUC__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(effc++) +#endif + +#ifdef _MSC_VER +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(4512) // assignment operator could not be generated +#endif + +#ifndef RAPIDJSON_REGEX_VERBOSE +#define RAPIDJSON_REGEX_VERBOSE 0 +#endif + +RAPIDJSON_NAMESPACE_BEGIN +namespace internal { + +/////////////////////////////////////////////////////////////////////////////// +// GenericRegex + +static const SizeType kRegexInvalidState = ~SizeType(0); //!< Represents an invalid index in GenericRegex::State::out, out1 +static const SizeType kRegexInvalidRange = ~SizeType(0); + +//! Regular expression engine with subset of ECMAscript grammar. +/*! + Supported regular expression syntax: + - \c ab Concatenation + - \c a|b Alternation + - \c a? Zero or one + - \c a* Zero or more + - \c a+ One or more + - \c a{3} Exactly 3 times + - \c a{3,} At least 3 times + - \c a{3,5} 3 to 5 times + - \c (ab) Grouping + - \c ^a At the beginning + - \c a$ At the end + - \c . Any character + - \c [abc] Character classes + - \c [a-c] Character class range + - \c [a-z0-9_] Character class combination + - \c [^abc] Negated character classes + - \c [^a-c] Negated character class range + - \c [\b] Backspace (U+0008) + - \c \\| \\\\ ... Escape characters + - \c \\f Form feed (U+000C) + - \c \\n Line feed (U+000A) + - \c \\r Carriage return (U+000D) + - \c \\t Tab (U+0009) + - \c \\v Vertical tab (U+000B) + + \note This is a Thompson NFA engine, implemented with reference to + Cox, Russ. "Regular Expression Matching Can Be Simple And Fast (but is slow in Java, Perl, PHP, Python, Ruby,...).", + https://swtch.com/~rsc/regexp/regexp1.html +*/ +template +class GenericRegex { +public: + typedef typename Encoding::Ch Ch; + + GenericRegex(const Ch* source, Allocator* allocator = 0) : + states_(allocator, 256), ranges_(allocator, 256), root_(kRegexInvalidState), stateCount_(), rangeCount_(), + stateSet_(), state0_(allocator, 0), state1_(allocator, 0), anchorBegin_(), anchorEnd_() + { + GenericStringStream ss(source); + DecodedStream > ds(ss); + Parse(ds); + } + + ~GenericRegex() { + Allocator::Free(stateSet_); + } + + bool IsValid() const { + return root_ != kRegexInvalidState; + } + + template + bool Match(InputStream& is) const { + return SearchWithAnchoring(is, true, true); + } + + bool Match(const Ch* s) const { + GenericStringStream is(s); + return Match(is); + } + + template + bool Search(InputStream& is) const { + return SearchWithAnchoring(is, anchorBegin_, anchorEnd_); + } + + bool Search(const Ch* s) const { + GenericStringStream is(s); + return Search(is); + } + +private: + enum Operator { + kZeroOrOne, + kZeroOrMore, + kOneOrMore, + kConcatenation, + kAlternation, + kLeftParenthesis + }; + + static const unsigned kAnyCharacterClass = 0xFFFFFFFF; //!< For '.' 
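    // Illustrative usage sketch (not part of the upstream header): the Match()/Search()
    // overloads above are typically driven through the UTF-8 `Regex` typedef declared at
    // the end of this file, e.g.
    //
    //     rapidjson::internal::Regex re("a{3,5}");   // three to five 'a's, per the grammar above
    //     if (re.IsValid()) {
    //         re.Match("aaaa");     // true  - Match() anchors at both ends
    //         re.Match("baaaab");   // false - the whole input must match
    //         re.Search("baaaab");  // true  - Search() is unanchored unless ^/$ are used
    //     }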
+ static const unsigned kRangeCharacterClass = 0xFFFFFFFE; + static const unsigned kRangeNegationFlag = 0x80000000; + + struct Range { + unsigned start; // + unsigned end; + SizeType next; + }; + + struct State { + SizeType out; //!< Equals to kInvalid for matching state + SizeType out1; //!< Equals to non-kInvalid for split + SizeType rangeStart; + unsigned codepoint; + }; + + struct Frag { + Frag(SizeType s, SizeType o, SizeType m) : start(s), out(o), minIndex(m) {} + SizeType start; + SizeType out; //!< link-list of all output states + SizeType minIndex; + }; + + template + class DecodedStream { + public: + DecodedStream(SourceStream& ss) : ss_(ss), codepoint_() { Decode(); } + unsigned Peek() { return codepoint_; } + unsigned Take() { + unsigned c = codepoint_; + if (c) // No further decoding when '\0' + Decode(); + return c; + } + + private: + void Decode() { + if (!Encoding::Decode(ss_, &codepoint_)) + codepoint_ = 0; + } + + SourceStream& ss_; + unsigned codepoint_; + }; + + State& GetState(SizeType index) { + RAPIDJSON_ASSERT(index < stateCount_); + return states_.template Bottom()[index]; + } + + const State& GetState(SizeType index) const { + RAPIDJSON_ASSERT(index < stateCount_); + return states_.template Bottom()[index]; + } + + Range& GetRange(SizeType index) { + RAPIDJSON_ASSERT(index < rangeCount_); + return ranges_.template Bottom()[index]; + } + + const Range& GetRange(SizeType index) const { + RAPIDJSON_ASSERT(index < rangeCount_); + return ranges_.template Bottom()[index]; + } + + template + void Parse(DecodedStream& ds) { + Allocator allocator; + Stack operandStack(&allocator, 256); // Frag + Stack operatorStack(&allocator, 256); // Operator + Stack atomCountStack(&allocator, 256); // unsigned (Atom per parenthesis) + + *atomCountStack.template Push() = 0; + + unsigned codepoint; + while (ds.Peek() != 0) { + switch (codepoint = ds.Take()) { + case '^': + anchorBegin_ = true; + break; + + case '$': + anchorEnd_ = true; + break; + + case '|': + while (!operatorStack.Empty() && *operatorStack.template Top() < kAlternation) + if (!Eval(operandStack, *operatorStack.template Pop(1))) + return; + *operatorStack.template Push() = kAlternation; + *atomCountStack.template Top() = 0; + break; + + case '(': + *operatorStack.template Push() = kLeftParenthesis; + *atomCountStack.template Push() = 0; + break; + + case ')': + while (!operatorStack.Empty() && *operatorStack.template Top() != kLeftParenthesis) + if (!Eval(operandStack, *operatorStack.template Pop(1))) + return; + if (operatorStack.Empty()) + return; + operatorStack.template Pop(1); + atomCountStack.template Pop(1); + ImplicitConcatenation(atomCountStack, operatorStack); + break; + + case '?': + if (!Eval(operandStack, kZeroOrOne)) + return; + break; + + case '*': + if (!Eval(operandStack, kZeroOrMore)) + return; + break; + + case '+': + if (!Eval(operandStack, kOneOrMore)) + return; + break; + + case '{': + { + unsigned n, m; + if (!ParseUnsigned(ds, &n)) + return; + + if (ds.Peek() == ',') { + ds.Take(); + if (ds.Peek() == '}') + m = kInfinityQuantifier; + else if (!ParseUnsigned(ds, &m) || m < n) + return; + } + else + m = n; + + if (!EvalQuantifier(operandStack, n, m) || ds.Peek() != '}') + return; + ds.Take(); + } + break; + + case '.': + PushOperand(operandStack, kAnyCharacterClass); + ImplicitConcatenation(atomCountStack, operatorStack); + break; + + case '[': + { + SizeType range; + if (!ParseRange(ds, &range)) + return; + SizeType s = NewState(kRegexInvalidState, kRegexInvalidState, kRangeCharacterClass); + 
GetState(s).rangeStart = range; + *operandStack.template Push() = Frag(s, s, s); + } + ImplicitConcatenation(atomCountStack, operatorStack); + break; + + case '\\': // Escape character + if (!CharacterEscape(ds, &codepoint)) + return; // Unsupported escape character + // fall through to default + + default: // Pattern character + PushOperand(operandStack, codepoint); + ImplicitConcatenation(atomCountStack, operatorStack); + } + } + + while (!operatorStack.Empty()) + if (!Eval(operandStack, *operatorStack.template Pop(1))) + return; + + // Link the operand to matching state. + if (operandStack.GetSize() == sizeof(Frag)) { + Frag* e = operandStack.template Pop(1); + Patch(e->out, NewState(kRegexInvalidState, kRegexInvalidState, 0)); + root_ = e->start; + +#if RAPIDJSON_REGEX_VERBOSE + printf("root: %d\n", root_); + for (SizeType i = 0; i < stateCount_ ; i++) { + State& s = GetState(i); + printf("[%2d] out: %2d out1: %2d c: '%c'\n", i, s.out, s.out1, (char)s.codepoint); + } + printf("\n"); +#endif + } + + // Preallocate buffer for SearchWithAnchoring() + RAPIDJSON_ASSERT(stateSet_ == 0); + if (stateCount_ > 0) { + stateSet_ = static_cast(states_.GetAllocator().Malloc(GetStateSetSize())); + state0_.template Reserve(stateCount_); + state1_.template Reserve(stateCount_); + } + } + + SizeType NewState(SizeType out, SizeType out1, unsigned codepoint) { + State* s = states_.template Push(); + s->out = out; + s->out1 = out1; + s->codepoint = codepoint; + s->rangeStart = kRegexInvalidRange; + return stateCount_++; + } + + void PushOperand(Stack& operandStack, unsigned codepoint) { + SizeType s = NewState(kRegexInvalidState, kRegexInvalidState, codepoint); + *operandStack.template Push() = Frag(s, s, s); + } + + void ImplicitConcatenation(Stack& atomCountStack, Stack& operatorStack) { + if (*atomCountStack.template Top()) + *operatorStack.template Push() = kConcatenation; + (*atomCountStack.template Top())++; + } + + SizeType Append(SizeType l1, SizeType l2) { + SizeType old = l1; + while (GetState(l1).out != kRegexInvalidState) + l1 = GetState(l1).out; + GetState(l1).out = l2; + return old; + } + + void Patch(SizeType l, SizeType s) { + for (SizeType next; l != kRegexInvalidState; l = next) { + next = GetState(l).out; + GetState(l).out = s; + } + } + + bool Eval(Stack& operandStack, Operator op) { + switch (op) { + case kConcatenation: + RAPIDJSON_ASSERT(operandStack.GetSize() >= sizeof(Frag) * 2); + { + Frag e2 = *operandStack.template Pop(1); + Frag e1 = *operandStack.template Pop(1); + Patch(e1.out, e2.start); + *operandStack.template Push() = Frag(e1.start, e2.out, Min(e1.minIndex, e2.minIndex)); + } + return true; + + case kAlternation: + if (operandStack.GetSize() >= sizeof(Frag) * 2) { + Frag e2 = *operandStack.template Pop(1); + Frag e1 = *operandStack.template Pop(1); + SizeType s = NewState(e1.start, e2.start, 0); + *operandStack.template Push() = Frag(s, Append(e1.out, e2.out), Min(e1.minIndex, e2.minIndex)); + return true; + } + return false; + + case kZeroOrOne: + if (operandStack.GetSize() >= sizeof(Frag)) { + Frag e = *operandStack.template Pop(1); + SizeType s = NewState(kRegexInvalidState, e.start, 0); + *operandStack.template Push() = Frag(s, Append(e.out, s), e.minIndex); + return true; + } + return false; + + case kZeroOrMore: + if (operandStack.GetSize() >= sizeof(Frag)) { + Frag e = *operandStack.template Pop(1); + SizeType s = NewState(kRegexInvalidState, e.start, 0); + Patch(e.out, s); + *operandStack.template Push() = Frag(s, s, e.minIndex); + return true; + } + return false; 
+ + default: + RAPIDJSON_ASSERT(op == kOneOrMore); + if (operandStack.GetSize() >= sizeof(Frag)) { + Frag e = *operandStack.template Pop(1); + SizeType s = NewState(kRegexInvalidState, e.start, 0); + Patch(e.out, s); + *operandStack.template Push() = Frag(e.start, s, e.minIndex); + return true; + } + return false; + } + } + + bool EvalQuantifier(Stack& operandStack, unsigned n, unsigned m) { + RAPIDJSON_ASSERT(n <= m); + RAPIDJSON_ASSERT(operandStack.GetSize() >= sizeof(Frag)); + + if (n == 0) { + if (m == 0) // a{0} not support + return false; + else if (m == kInfinityQuantifier) + Eval(operandStack, kZeroOrMore); // a{0,} -> a* + else { + Eval(operandStack, kZeroOrOne); // a{0,5} -> a? + for (unsigned i = 0; i < m - 1; i++) + CloneTopOperand(operandStack); // a{0,5} -> a? a? a? a? a? + for (unsigned i = 0; i < m - 1; i++) + Eval(operandStack, kConcatenation); // a{0,5} -> a?a?a?a?a? + } + return true; + } + + for (unsigned i = 0; i < n - 1; i++) // a{3} -> a a a + CloneTopOperand(operandStack); + + if (m == kInfinityQuantifier) + Eval(operandStack, kOneOrMore); // a{3,} -> a a a+ + else if (m > n) { + CloneTopOperand(operandStack); // a{3,5} -> a a a a + Eval(operandStack, kZeroOrOne); // a{3,5} -> a a a a? + for (unsigned i = n; i < m - 1; i++) + CloneTopOperand(operandStack); // a{3,5} -> a a a a? a? + for (unsigned i = n; i < m; i++) + Eval(operandStack, kConcatenation); // a{3,5} -> a a aa?a? + } + + for (unsigned i = 0; i < n - 1; i++) + Eval(operandStack, kConcatenation); // a{3} -> aaa, a{3,} -> aaa+, a{3.5} -> aaaa?a? + + return true; + } + + static SizeType Min(SizeType a, SizeType b) { return a < b ? a : b; } + + void CloneTopOperand(Stack& operandStack) { + const Frag src = *operandStack.template Top(); // Copy constructor to prevent invalidation + SizeType count = stateCount_ - src.minIndex; // Assumes top operand contains states in [src->minIndex, stateCount_) + State* s = states_.template Push(count); + memcpy(s, &GetState(src.minIndex), count * sizeof(State)); + for (SizeType j = 0; j < count; j++) { + if (s[j].out != kRegexInvalidState) + s[j].out += count; + if (s[j].out1 != kRegexInvalidState) + s[j].out1 += count; + } + *operandStack.template Push() = Frag(src.start + count, src.out + count, src.minIndex + count); + stateCount_ += count; + } + + template + bool ParseUnsigned(DecodedStream& ds, unsigned* u) { + unsigned r = 0; + if (ds.Peek() < '0' || ds.Peek() > '9') + return false; + while (ds.Peek() >= '0' && ds.Peek() <= '9') { + if (r >= 429496729 && ds.Peek() > '5') // 2^32 - 1 = 4294967295 + return false; // overflow + r = r * 10 + (ds.Take() - '0'); + } + *u = r; + return true; + } + + template + bool ParseRange(DecodedStream& ds, SizeType* range) { + bool isBegin = true; + bool negate = false; + int step = 0; + SizeType start = kRegexInvalidRange; + SizeType current = kRegexInvalidRange; + unsigned codepoint; + while ((codepoint = ds.Take()) != 0) { + if (isBegin) { + isBegin = false; + if (codepoint == '^') { + negate = true; + continue; + } + } + + switch (codepoint) { + case ']': + if (start == kRegexInvalidRange) + return false; // Error: nothing inside [] + if (step == 2) { // Add trailing '-' + SizeType r = NewRange('-'); + RAPIDJSON_ASSERT(current != kRegexInvalidRange); + GetRange(current).next = r; + } + if (negate) + GetRange(start).start |= kRangeNegationFlag; + *range = start; + return true; + + case '\\': + if (ds.Peek() == 'b') { + ds.Take(); + codepoint = 0x0008; // Escape backspace character + } + else if (!CharacterEscape(ds, &codepoint)) + 
return false; + // fall through to default + + default: + switch (step) { + case 1: + if (codepoint == '-') { + step++; + break; + } + // fall through to step 0 for other characters + + case 0: + { + SizeType r = NewRange(codepoint); + if (current != kRegexInvalidRange) + GetRange(current).next = r; + if (start == kRegexInvalidRange) + start = r; + current = r; + } + step = 1; + break; + + default: + RAPIDJSON_ASSERT(step == 2); + GetRange(current).end = codepoint; + step = 0; + } + } + } + return false; + } + + SizeType NewRange(unsigned codepoint) { + Range* r = ranges_.template Push(); + r->start = r->end = codepoint; + r->next = kRegexInvalidRange; + return rangeCount_++; + } + + template + bool CharacterEscape(DecodedStream& ds, unsigned* escapedCodepoint) { + unsigned codepoint; + switch (codepoint = ds.Take()) { + case '^': + case '$': + case '|': + case '(': + case ')': + case '?': + case '*': + case '+': + case '.': + case '[': + case ']': + case '{': + case '}': + case '\\': + *escapedCodepoint = codepoint; return true; + case 'f': *escapedCodepoint = 0x000C; return true; + case 'n': *escapedCodepoint = 0x000A; return true; + case 'r': *escapedCodepoint = 0x000D; return true; + case 't': *escapedCodepoint = 0x0009; return true; + case 'v': *escapedCodepoint = 0x000B; return true; + default: + return false; // Unsupported escape character + } + } + + template + bool SearchWithAnchoring(InputStream& is, bool anchorBegin, bool anchorEnd) const { + RAPIDJSON_ASSERT(IsValid()); + DecodedStream ds(is); + + state0_.Clear(); + Stack *current = &state0_, *next = &state1_; + const size_t stateSetSize = GetStateSetSize(); + std::memset(stateSet_, 0, stateSetSize); + + bool matched = AddState(*current, root_); + unsigned codepoint; + while (!current->Empty() && (codepoint = ds.Take()) != 0) { + std::memset(stateSet_, 0, stateSetSize); + next->Clear(); + matched = false; + for (const SizeType* s = current->template Bottom(); s != current->template End(); ++s) { + const State& sr = GetState(*s); + if (sr.codepoint == codepoint || + sr.codepoint == kAnyCharacterClass || + (sr.codepoint == kRangeCharacterClass && MatchRange(sr.rangeStart, codepoint))) + { + matched = AddState(*next, sr.out) || matched; + if (!anchorEnd && matched) + return true; + } + if (!anchorBegin) + AddState(*next, root_); + } + internal::Swap(current, next); + } + + return matched; + } + + size_t GetStateSetSize() const { + return (stateCount_ + 31) / 32 * 4; + } + + // Return whether the added states is a match state + bool AddState(Stack& l, SizeType index) const { + RAPIDJSON_ASSERT(index != kRegexInvalidState); + + const State& s = GetState(index); + if (s.out1 != kRegexInvalidState) { // Split + bool matched = AddState(l, s.out); + return AddState(l, s.out1) || matched; + } + else if (!(stateSet_[index >> 5] & (1 << (index & 31)))) { + stateSet_[index >> 5] |= (1 << (index & 31)); + *l.template PushUnsafe() = index; + } + return s.out == kRegexInvalidState; // by using PushUnsafe() above, we can ensure s is not validated due to reallocation. 
+    }
+
+    bool MatchRange(SizeType rangeIndex, unsigned codepoint) const {
+        bool yes = (GetRange(rangeIndex).start & kRangeNegationFlag) == 0;
+        while (rangeIndex != kRegexInvalidRange) {
+            const Range& r = GetRange(rangeIndex);
+            if (codepoint >= (r.start & ~kRangeNegationFlag) && codepoint <= r.end)
+                return yes;
+            rangeIndex = r.next;
+        }
+        return !yes;
+    }
+
+    Stack<Allocator> states_;
+    Stack<Allocator> ranges_;
+    SizeType root_;
+    SizeType stateCount_;
+    SizeType rangeCount_;
+
+    static const unsigned kInfinityQuantifier = ~0u;
+
+    // For SearchWithAnchoring()
+    uint32_t* stateSet_;        // allocated by states_.GetAllocator()
+    mutable Stack<Allocator> state0_;
+    mutable Stack<Allocator> state1_;
+    bool anchorBegin_;
+    bool anchorEnd_;
+};
+
+typedef GenericRegex<UTF8<> > Regex;
+
+} // namespace internal
+RAPIDJSON_NAMESPACE_END
+
+#ifdef __clang__
+RAPIDJSON_DIAG_POP
+#endif
+
+#ifdef _MSC_VER
+RAPIDJSON_DIAG_POP
+#endif
+
+#endif // RAPIDJSON_INTERNAL_REGEX_H_
diff --git a/include/rapidjson/internal/stack.h b/include/rapidjson/internal/stack.h
new file mode 100644
index 0000000..022c9aa
--- /dev/null
+++ b/include/rapidjson/internal/stack.h
@@ -0,0 +1,230 @@
+// Tencent is pleased to support the open source community by making RapidJSON available.
+//
+// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved.
+//
+// Licensed under the MIT License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless required by applicable law or agreed to in writing, software distributed
+// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+// CONDITIONS OF ANY KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations under the License.
+
+#ifndef RAPIDJSON_INTERNAL_STACK_H_
+#define RAPIDJSON_INTERNAL_STACK_H_
+
+#include "../allocators.h"
+#include "swap.h"
+
+#if defined(__clang__)
+RAPIDJSON_DIAG_PUSH
+RAPIDJSON_DIAG_OFF(c++98-compat)
+#endif
+
+RAPIDJSON_NAMESPACE_BEGIN
+namespace internal {
+
+///////////////////////////////////////////////////////////////////////////////
+// Stack
+
+//! A type-unsafe stack for storing different types of data.
+/*! \tparam Allocator Allocator for allocating stack memory.
+*/
+template <typename Allocator>
+class Stack {
+public:
+    // Optimization note: Do not allocate memory for stack_ in constructor.
+    // Do it lazily when first Push() -> Expand() -> Resize().
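    // Illustrative usage sketch (not part of the upstream header): the stack is type-unsafe,
    // so callers must pair Push<T>/Top<T>/Pop<T> with the same T themselves. Assuming
    // CrtAllocator from allocators.h as the Allocator parameter:
    //
    //     rapidjson::CrtAllocator a;
    //     rapidjson::internal::Stack<rapidjson::CrtAllocator> s(&a, 256);
    //     *s.Push<int>() = 42;      // reserves sizeof(int) bytes and writes into them
    //     int v = *s.Top<int>();    // v == 42
    //     s.Pop<int>(1);            // releases the same number of bytes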
+ Stack(Allocator* allocator, size_t stackCapacity) : allocator_(allocator), ownAllocator_(0), stack_(0), stackTop_(0), stackEnd_(0), initialCapacity_(stackCapacity) { + } + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + Stack(Stack&& rhs) + : allocator_(rhs.allocator_), + ownAllocator_(rhs.ownAllocator_), + stack_(rhs.stack_), + stackTop_(rhs.stackTop_), + stackEnd_(rhs.stackEnd_), + initialCapacity_(rhs.initialCapacity_) + { + rhs.allocator_ = 0; + rhs.ownAllocator_ = 0; + rhs.stack_ = 0; + rhs.stackTop_ = 0; + rhs.stackEnd_ = 0; + rhs.initialCapacity_ = 0; + } +#endif + + ~Stack() { + Destroy(); + } + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + Stack& operator=(Stack&& rhs) { + if (&rhs != this) + { + Destroy(); + + allocator_ = rhs.allocator_; + ownAllocator_ = rhs.ownAllocator_; + stack_ = rhs.stack_; + stackTop_ = rhs.stackTop_; + stackEnd_ = rhs.stackEnd_; + initialCapacity_ = rhs.initialCapacity_; + + rhs.allocator_ = 0; + rhs.ownAllocator_ = 0; + rhs.stack_ = 0; + rhs.stackTop_ = 0; + rhs.stackEnd_ = 0; + rhs.initialCapacity_ = 0; + } + return *this; + } +#endif + + void Swap(Stack& rhs) RAPIDJSON_NOEXCEPT { + internal::Swap(allocator_, rhs.allocator_); + internal::Swap(ownAllocator_, rhs.ownAllocator_); + internal::Swap(stack_, rhs.stack_); + internal::Swap(stackTop_, rhs.stackTop_); + internal::Swap(stackEnd_, rhs.stackEnd_); + internal::Swap(initialCapacity_, rhs.initialCapacity_); + } + + void Clear() { stackTop_ = stack_; } + + void ShrinkToFit() { + if (Empty()) { + // If the stack is empty, completely deallocate the memory. + Allocator::Free(stack_); + stack_ = 0; + stackTop_ = 0; + stackEnd_ = 0; + } + else + Resize(GetSize()); + } + + // Optimization note: try to minimize the size of this function for force inline. + // Expansion is run very infrequently, so it is moved to another (probably non-inline) function. + template + RAPIDJSON_FORCEINLINE void Reserve(size_t count = 1) { + // Expand the stack if needed + if (RAPIDJSON_UNLIKELY(stackTop_ + sizeof(T) * count > stackEnd_)) + Expand(count); + } + + template + RAPIDJSON_FORCEINLINE T* Push(size_t count = 1) { + Reserve(count); + return PushUnsafe(count); + } + + template + RAPIDJSON_FORCEINLINE T* PushUnsafe(size_t count = 1) { + RAPIDJSON_ASSERT(stackTop_ + sizeof(T) * count <= stackEnd_); + T* ret = reinterpret_cast(stackTop_); + stackTop_ += sizeof(T) * count; + return ret; + } + + template + T* Pop(size_t count) { + RAPIDJSON_ASSERT(GetSize() >= count * sizeof(T)); + stackTop_ -= count * sizeof(T); + return reinterpret_cast(stackTop_); + } + + template + T* Top() { + RAPIDJSON_ASSERT(GetSize() >= sizeof(T)); + return reinterpret_cast(stackTop_ - sizeof(T)); + } + + template + const T* Top() const { + RAPIDJSON_ASSERT(GetSize() >= sizeof(T)); + return reinterpret_cast(stackTop_ - sizeof(T)); + } + + template + T* End() { return reinterpret_cast(stackTop_); } + + template + const T* End() const { return reinterpret_cast(stackTop_); } + + template + T* Bottom() { return reinterpret_cast(stack_); } + + template + const T* Bottom() const { return reinterpret_cast(stack_); } + + bool HasAllocator() const { + return allocator_ != 0; + } + + Allocator& GetAllocator() { + RAPIDJSON_ASSERT(allocator_); + return *allocator_; + } + + bool Empty() const { return stackTop_ == stack_; } + size_t GetSize() const { return static_cast(stackTop_ - stack_); } + size_t GetCapacity() const { return static_cast(stackEnd_ - stack_); } + +private: + template + void Expand(size_t count) { + // Only expand the capacity if the current stack exists. 
Otherwise just create a stack with initial capacity. + size_t newCapacity; + if (stack_ == 0) { + if (!allocator_) + ownAllocator_ = allocator_ = RAPIDJSON_NEW(Allocator()); + newCapacity = initialCapacity_; + } else { + newCapacity = GetCapacity(); + newCapacity += (newCapacity + 1) / 2; + } + size_t newSize = GetSize() + sizeof(T) * count; + if (newCapacity < newSize) + newCapacity = newSize; + + Resize(newCapacity); + } + + void Resize(size_t newCapacity) { + const size_t size = GetSize(); // Backup the current size + stack_ = static_cast(allocator_->Realloc(stack_, GetCapacity(), newCapacity)); + stackTop_ = stack_ + size; + stackEnd_ = stack_ + newCapacity; + } + + void Destroy() { + Allocator::Free(stack_); + RAPIDJSON_DELETE(ownAllocator_); // Only delete if it is owned by the stack + } + + // Prohibit copy constructor & assignment operator. + Stack(const Stack&); + Stack& operator=(const Stack&); + + Allocator* allocator_; + Allocator* ownAllocator_; + char *stack_; + char *stackTop_; + char *stackEnd_; + size_t initialCapacity_; +}; + +} // namespace internal +RAPIDJSON_NAMESPACE_END + +#if defined(__clang__) +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_STACK_H_ diff --git a/include/rapidjson/internal/strfunc.h b/include/rapidjson/internal/strfunc.h new file mode 100644 index 0000000..2edfae5 --- /dev/null +++ b/include/rapidjson/internal/strfunc.h @@ -0,0 +1,55 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_INTERNAL_STRFUNC_H_ +#define RAPIDJSON_INTERNAL_STRFUNC_H_ + +#include "../stream.h" + +RAPIDJSON_NAMESPACE_BEGIN +namespace internal { + +//! Custom strlen() which works on different character types. +/*! \tparam Ch Character type (e.g. char, wchar_t, short) + \param s Null-terminated input string. + \return Number of characters in the string. + \note This has the same semantics as strlen(), the return value is not number of Unicode codepoints. +*/ +template +inline SizeType StrLen(const Ch* s) { + const Ch* p = s; + while (*p) ++p; + return SizeType(p - s); +} + +//! Returns number of code points in a encoded string. +template +bool CountStringCodePoint(const typename Encoding::Ch* s, SizeType length, SizeType* outCount) { + GenericStringStream is(s); + const typename Encoding::Ch* end = s + length; + SizeType count = 0; + while (is.src_ < end) { + unsigned codepoint; + if (!Encoding::Decode(is, &codepoint)) + return false; + count++; + } + *outCount = count; + return true; +} + +} // namespace internal +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_INTERNAL_STRFUNC_H_ diff --git a/include/rapidjson/internal/strtod.h b/include/rapidjson/internal/strtod.h new file mode 100644 index 0000000..289c413 --- /dev/null +++ b/include/rapidjson/internal/strtod.h @@ -0,0 +1,269 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. 
+// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_STRTOD_ +#define RAPIDJSON_STRTOD_ + +#include "ieee754.h" +#include "biginteger.h" +#include "diyfp.h" +#include "pow10.h" + +RAPIDJSON_NAMESPACE_BEGIN +namespace internal { + +inline double FastPath(double significand, int exp) { + if (exp < -308) + return 0.0; + else if (exp >= 0) + return significand * internal::Pow10(exp); + else + return significand / internal::Pow10(-exp); +} + +inline double StrtodNormalPrecision(double d, int p) { + if (p < -308) { + // Prevent expSum < -308, making Pow10(p) = 0 + d = FastPath(d, -308); + d = FastPath(d, p + 308); + } + else + d = FastPath(d, p); + return d; +} + +template +inline T Min3(T a, T b, T c) { + T m = a; + if (m > b) m = b; + if (m > c) m = c; + return m; +} + +inline int CheckWithinHalfULP(double b, const BigInteger& d, int dExp) { + const Double db(b); + const uint64_t bInt = db.IntegerSignificand(); + const int bExp = db.IntegerExponent(); + const int hExp = bExp - 1; + + int dS_Exp2 = 0, dS_Exp5 = 0, bS_Exp2 = 0, bS_Exp5 = 0, hS_Exp2 = 0, hS_Exp5 = 0; + + // Adjust for decimal exponent + if (dExp >= 0) { + dS_Exp2 += dExp; + dS_Exp5 += dExp; + } + else { + bS_Exp2 -= dExp; + bS_Exp5 -= dExp; + hS_Exp2 -= dExp; + hS_Exp5 -= dExp; + } + + // Adjust for binary exponent + if (bExp >= 0) + bS_Exp2 += bExp; + else { + dS_Exp2 -= bExp; + hS_Exp2 -= bExp; + } + + // Adjust for half ulp exponent + if (hExp >= 0) + hS_Exp2 += hExp; + else { + dS_Exp2 -= hExp; + bS_Exp2 -= hExp; + } + + // Remove common power of two factor from all three scaled values + int common_Exp2 = Min3(dS_Exp2, bS_Exp2, hS_Exp2); + dS_Exp2 -= common_Exp2; + bS_Exp2 -= common_Exp2; + hS_Exp2 -= common_Exp2; + + BigInteger dS = d; + dS.MultiplyPow5(static_cast(dS_Exp5)) <<= static_cast(dS_Exp2); + + BigInteger bS(bInt); + bS.MultiplyPow5(static_cast(bS_Exp5)) <<= static_cast(bS_Exp2); + + BigInteger hS(1); + hS.MultiplyPow5(static_cast(hS_Exp5)) <<= static_cast(hS_Exp2); + + BigInteger delta(0); + dS.Difference(bS, &delta); + + return delta.Compare(hS); +} + +inline bool StrtodFast(double d, int p, double* result) { + // Use fast path for string-to-double conversion if possible + // see http://www.exploringbinary.com/fast-path-decimal-to-floating-point-conversion/ + if (p > 22 && p < 22 + 16) { + // Fast Path Cases In Disguise + d *= internal::Pow10(p - 22); + p = 22; + } + + if (p >= -22 && p <= 22 && d <= 9007199254740991.0) { // 2^53 - 1 + *result = FastPath(d, p); + return true; + } + else + return false; +} + +// Compute an approximation and see if it is within 1/2 ULP +inline bool StrtodDiyFp(const char* decimals, size_t length, size_t decimalPosition, int exp, double* result) { + uint64_t significand = 0; + size_t i = 0; // 2^64 - 1 = 18446744073709551615, 1844674407370955161 = 0x1999999999999999 + for (; i < length; i++) { + if (significand > RAPIDJSON_UINT64_C2(0x19999999, 0x99999999) || + (significand == 
RAPIDJSON_UINT64_C2(0x19999999, 0x99999999) && decimals[i] > '5')) + break; + significand = significand * 10u + static_cast(decimals[i] - '0'); + } + + if (i < length && decimals[i] >= '5') // Rounding + significand++; + + size_t remaining = length - i; + const unsigned kUlpShift = 3; + const unsigned kUlp = 1 << kUlpShift; + int64_t error = (remaining == 0) ? 0 : kUlp / 2; + + DiyFp v(significand, 0); + v = v.Normalize(); + error <<= -v.e; + + const int dExp = static_cast(decimalPosition) - static_cast(i) + exp; + + int actualExp; + DiyFp cachedPower = GetCachedPower10(dExp, &actualExp); + if (actualExp != dExp) { + static const DiyFp kPow10[] = { + DiyFp(RAPIDJSON_UINT64_C2(0xa0000000, 00000000), -60), // 10^1 + DiyFp(RAPIDJSON_UINT64_C2(0xc8000000, 00000000), -57), // 10^2 + DiyFp(RAPIDJSON_UINT64_C2(0xfa000000, 00000000), -54), // 10^3 + DiyFp(RAPIDJSON_UINT64_C2(0x9c400000, 00000000), -50), // 10^4 + DiyFp(RAPIDJSON_UINT64_C2(0xc3500000, 00000000), -47), // 10^5 + DiyFp(RAPIDJSON_UINT64_C2(0xf4240000, 00000000), -44), // 10^6 + DiyFp(RAPIDJSON_UINT64_C2(0x98968000, 00000000), -40) // 10^7 + }; + int adjustment = dExp - actualExp - 1; + RAPIDJSON_ASSERT(adjustment >= 0 && adjustment < 7); + v = v * kPow10[adjustment]; + if (length + static_cast(adjustment)> 19u) // has more digits than decimal digits in 64-bit + error += kUlp / 2; + } + + v = v * cachedPower; + + error += kUlp + (error == 0 ? 0 : 1); + + const int oldExp = v.e; + v = v.Normalize(); + error <<= oldExp - v.e; + + const unsigned effectiveSignificandSize = Double::EffectiveSignificandSize(64 + v.e); + unsigned precisionSize = 64 - effectiveSignificandSize; + if (precisionSize + kUlpShift >= 64) { + unsigned scaleExp = (precisionSize + kUlpShift) - 63; + v.f >>= scaleExp; + v.e += scaleExp; + error = (error >> scaleExp) + 1 + static_cast(kUlp); + precisionSize -= scaleExp; + } + + DiyFp rounded(v.f >> precisionSize, v.e + static_cast(precisionSize)); + const uint64_t precisionBits = (v.f & ((uint64_t(1) << precisionSize) - 1)) * kUlp; + const uint64_t halfWay = (uint64_t(1) << (precisionSize - 1)) * kUlp; + if (precisionBits >= halfWay + static_cast(error)) { + rounded.f++; + if (rounded.f & (DiyFp::kDpHiddenBit << 1)) { // rounding overflows mantissa (issue #340) + rounded.f >>= 1; + rounded.e++; + } + } + + *result = rounded.ToDouble(); + + return halfWay - static_cast(error) >= precisionBits || precisionBits >= halfWay + static_cast(error); +} + +inline double StrtodBigInteger(double approx, const char* decimals, size_t length, size_t decimalPosition, int exp) { + const BigInteger dInt(decimals, length); + const int dExp = static_cast(decimalPosition) - static_cast(length) + exp; + Double a(approx); + int cmp = CheckWithinHalfULP(a.Value(), dInt, dExp); + if (cmp < 0) + return a.Value(); // within half ULP + else if (cmp == 0) { + // Round towards even + if (a.Significand() & 1) + return a.NextPositiveDouble(); + else + return a.Value(); + } + else // adjustment + return a.NextPositiveDouble(); +} + +inline double StrtodFullPrecision(double d, int p, const char* decimals, size_t length, size_t decimalPosition, int exp) { + RAPIDJSON_ASSERT(d >= 0.0); + RAPIDJSON_ASSERT(length >= 1); + + double result; + if (StrtodFast(d, p, &result)) + return result; + + // Trim leading zeros + while (*decimals == '0' && length > 1) { + length--; + decimals++; + decimalPosition--; + } + + // Trim trailing zeros + while (decimals[length - 1] == '0' && length > 1) { + length--; + decimalPosition--; + exp++; + } + + // Trim right-most 
digits + const int kMaxDecimalDigit = 780; + if (static_cast(length) > kMaxDecimalDigit) { + int delta = (static_cast(length) - kMaxDecimalDigit); + exp += delta; + decimalPosition -= static_cast(delta); + length = kMaxDecimalDigit; + } + + // If too small, underflow to zero + if (int(length) + exp < -324) + return 0.0; + + if (StrtodDiyFp(decimals, length, decimalPosition, exp, &result)) + return result; + + // Use approximation from StrtodDiyFp and make adjustment with BigInteger comparison + return StrtodBigInteger(result, decimals, length, decimalPosition, exp); +} + +} // namespace internal +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_STRTOD_ diff --git a/include/rapidjson/internal/swap.h b/include/rapidjson/internal/swap.h new file mode 100644 index 0000000..666e49f --- /dev/null +++ b/include/rapidjson/internal/swap.h @@ -0,0 +1,46 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_INTERNAL_SWAP_H_ +#define RAPIDJSON_INTERNAL_SWAP_H_ + +#include "../rapidjson.h" + +#if defined(__clang__) +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(c++98-compat) +#endif + +RAPIDJSON_NAMESPACE_BEGIN +namespace internal { + +//! Custom swap() to avoid dependency on C++ header +/*! \tparam T Type of the arguments to swap, should be instantiated with primitive C++ types only. + \note This has the same semantics as std::swap(). +*/ +template +inline void Swap(T& a, T& b) RAPIDJSON_NOEXCEPT { + T tmp = a; + a = b; + b = tmp; +} + +} // namespace internal +RAPIDJSON_NAMESPACE_END + +#if defined(__clang__) +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_INTERNAL_SWAP_H_ diff --git a/include/rapidjson/istreamwrapper.h b/include/rapidjson/istreamwrapper.h new file mode 100644 index 0000000..f5fe289 --- /dev/null +++ b/include/rapidjson/istreamwrapper.h @@ -0,0 +1,115 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. 
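+// A minimal usage sketch for the wrapper defined below, assuming the Document /
+// ParseStream API from document.h (Document, ParseStream and HasParseError are
+// not defined in this header):
+/*!
+    \b Example
+    \code
+    #include <fstream>
+    #include "rapidjson/document.h"
+    #include "rapidjson/istreamwrapper.h"
+
+    bool LoadJson(const char* path, rapidjson::Document& d) {
+        std::ifstream ifs(path);
+        if (!ifs)
+            return false;
+        rapidjson::IStreamWrapper isw(ifs);  // adapts any std::basic_istream to the Stream concept
+        d.ParseStream(isw);                  // the parser pulls characters via Peek()/Take()
+        return !d.HasParseError();
+    }
+    \endcode
+*/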
+ +#ifndef RAPIDJSON_ISTREAMWRAPPER_H_ +#define RAPIDJSON_ISTREAMWRAPPER_H_ + +#include "stream.h" +#include + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(padded) +#endif + +#ifdef _MSC_VER +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(4351) // new behavior: elements of array 'array' will be default initialized +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +//! Wrapper of \c std::basic_istream into RapidJSON's Stream concept. +/*! + The classes can be wrapped including but not limited to: + + - \c std::istringstream + - \c std::stringstream + - \c std::wistringstream + - \c std::wstringstream + - \c std::ifstream + - \c std::fstream + - \c std::wifstream + - \c std::wfstream + + \tparam StreamType Class derived from \c std::basic_istream. +*/ + +template +class BasicIStreamWrapper { +public: + typedef typename StreamType::char_type Ch; + BasicIStreamWrapper(StreamType& stream) : stream_(stream), count_(), peekBuffer_() {} + + Ch Peek() const { + typename StreamType::int_type c = stream_.peek(); + return RAPIDJSON_LIKELY(c != StreamType::traits_type::eof()) ? static_cast(c) : '\0'; + } + + Ch Take() { + typename StreamType::int_type c = stream_.get(); + if (RAPIDJSON_LIKELY(c != StreamType::traits_type::eof())) { + count_++; + return static_cast(c); + } + else + return '\0'; + } + + // tellg() may return -1 when failed. So we count by ourself. + size_t Tell() const { return count_; } + + Ch* PutBegin() { RAPIDJSON_ASSERT(false); return 0; } + void Put(Ch) { RAPIDJSON_ASSERT(false); } + void Flush() { RAPIDJSON_ASSERT(false); } + size_t PutEnd(Ch*) { RAPIDJSON_ASSERT(false); return 0; } + + // For encoding detection only. + const Ch* Peek4() const { + RAPIDJSON_ASSERT(sizeof(Ch) == 1); // Only usable for byte stream. + int i; + bool hasError = false; + for (i = 0; i < 4; ++i) { + typename StreamType::int_type c = stream_.get(); + if (c == StreamType::traits_type::eof()) { + hasError = true; + stream_.clear(); + break; + } + peekBuffer_[i] = static_cast(c); + } + for (--i; i >= 0; --i) + stream_.putback(peekBuffer_[i]); + return !hasError ? peekBuffer_ : 0; + } + +private: + BasicIStreamWrapper(const BasicIStreamWrapper&); + BasicIStreamWrapper& operator=(const BasicIStreamWrapper&); + + StreamType& stream_; + size_t count_; //!< Number of characters read. Note: + mutable Ch peekBuffer_[4]; +}; + +typedef BasicIStreamWrapper IStreamWrapper; +typedef BasicIStreamWrapper WIStreamWrapper; + +#if defined(__clang__) || defined(_MSC_VER) +RAPIDJSON_DIAG_POP +#endif + +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_ISTREAMWRAPPER_H_ diff --git a/include/rapidjson/memorybuffer.h b/include/rapidjson/memorybuffer.h new file mode 100644 index 0000000..39bee1d --- /dev/null +++ b/include/rapidjson/memorybuffer.h @@ -0,0 +1,70 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. 
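+// A minimal usage sketch for the byte buffer defined below, assuming Writer from
+// writer.h and Document from document.h (neither is defined in this header):
+/*!
+    \b Example
+    \code
+    #include "rapidjson/document.h"
+    #include "rapidjson/memorybuffer.h"
+    #include "rapidjson/writer.h"
+
+    void DumpToBytes(const rapidjson::Document& d) {
+        rapidjson::MemoryBuffer buffer;                         // raw byte sink, no encoding
+        rapidjson::Writer<rapidjson::MemoryBuffer> writer(buffer);
+        d.Accept(writer);                                       // serialize the DOM into the buffer
+        const char* bytes = buffer.GetBuffer();                 // contiguous bytes, not null-terminated
+        size_t size = buffer.GetSize();
+        (void)bytes; (void)size;                                // hand the range to a file or socket API
+    }
+    \endcode
+*/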
+ +#ifndef RAPIDJSON_MEMORYBUFFER_H_ +#define RAPIDJSON_MEMORYBUFFER_H_ + +#include "stream.h" +#include "internal/stack.h" + +RAPIDJSON_NAMESPACE_BEGIN + +//! Represents an in-memory output byte stream. +/*! + This class is mainly for being wrapped by EncodedOutputStream or AutoUTFOutputStream. + + It is similar to FileWriteBuffer but the destination is an in-memory buffer instead of a file. + + Differences between MemoryBuffer and StringBuffer: + 1. StringBuffer has Encoding but MemoryBuffer is only a byte buffer. + 2. StringBuffer::GetString() returns a null-terminated string. MemoryBuffer::GetBuffer() returns a buffer without terminator. + + \tparam Allocator type for allocating memory buffer. + \note implements Stream concept +*/ +template +struct GenericMemoryBuffer { + typedef char Ch; // byte + + GenericMemoryBuffer(Allocator* allocator = 0, size_t capacity = kDefaultCapacity) : stack_(allocator, capacity) {} + + void Put(Ch c) { *stack_.template Push() = c; } + void Flush() {} + + void Clear() { stack_.Clear(); } + void ShrinkToFit() { stack_.ShrinkToFit(); } + Ch* Push(size_t count) { return stack_.template Push(count); } + void Pop(size_t count) { stack_.template Pop(count); } + + const Ch* GetBuffer() const { + return stack_.template Bottom(); + } + + size_t GetSize() const { return stack_.GetSize(); } + + static const size_t kDefaultCapacity = 256; + mutable internal::Stack stack_; +}; + +typedef GenericMemoryBuffer<> MemoryBuffer; + +//! Implement specialized version of PutN() with memset() for better performance. +template<> +inline void PutN(MemoryBuffer& memoryBuffer, char c, size_t n) { + std::memset(memoryBuffer.stack_.Push(n), c, n * sizeof(c)); +} + +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_MEMORYBUFFER_H_ diff --git a/include/rapidjson/memorystream.h b/include/rapidjson/memorystream.h new file mode 100644 index 0000000..1d71d8a --- /dev/null +++ b/include/rapidjson/memorystream.h @@ -0,0 +1,71 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_MEMORYSTREAM_H_ +#define RAPIDJSON_MEMORYSTREAM_H_ + +#include "stream.h" + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(unreachable-code) +RAPIDJSON_DIAG_OFF(missing-noreturn) +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +//! Represents an in-memory input byte stream. +/*! + This class is mainly for being wrapped by EncodedInputStream or AutoUTFInputStream. + + It is similar to FileReadBuffer but the source is an in-memory buffer instead of a file. + + Differences between MemoryStream and StringStream: + 1. StringStream has encoding but MemoryStream is a byte stream. + 2. MemoryStream needs size of the source buffer and the buffer don't need to be null terminated. StringStream assume null-terminated string as source. + 3. MemoryStream supports Peek4() for encoding detection. 
StringStream is specified with an encoding so it should not have Peek4(). + \note implements Stream concept +*/ +struct MemoryStream { + typedef char Ch; // byte + + MemoryStream(const Ch *src, size_t size) : src_(src), begin_(src), end_(src + size), size_(size) {} + + Ch Peek() const { return RAPIDJSON_UNLIKELY(src_ == end_) ? '\0' : *src_; } + Ch Take() { return RAPIDJSON_UNLIKELY(src_ == end_) ? '\0' : *src_++; } + size_t Tell() const { return static_cast(src_ - begin_); } + + Ch* PutBegin() { RAPIDJSON_ASSERT(false); return 0; } + void Put(Ch) { RAPIDJSON_ASSERT(false); } + void Flush() { RAPIDJSON_ASSERT(false); } + size_t PutEnd(Ch*) { RAPIDJSON_ASSERT(false); return 0; } + + // For encoding detection only. + const Ch* Peek4() const { + return Tell() + 4 <= size_ ? src_ : 0; + } + + const Ch* src_; //!< Current read position. + const Ch* begin_; //!< Original head of the string. + const Ch* end_; //!< End of stream. + size_t size_; //!< Size of the stream. +}; + +RAPIDJSON_NAMESPACE_END + +#ifdef __clang__ +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_MEMORYBUFFER_H_ diff --git a/include/rapidjson/msinttypes/inttypes.h b/include/rapidjson/msinttypes/inttypes.h new file mode 100644 index 0000000..1811128 --- /dev/null +++ b/include/rapidjson/msinttypes/inttypes.h @@ -0,0 +1,316 @@ +// ISO C9x compliant inttypes.h for Microsoft Visual Studio +// Based on ISO/IEC 9899:TC2 Committee draft (May 6, 2005) WG14/N1124 +// +// Copyright (c) 2006-2013 Alexander Chemeris +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// +// 3. Neither the name of the product nor the names of its contributors may +// be used to endorse or promote products derived from this software +// without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +// OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +/////////////////////////////////////////////////////////////////////////////// + +// The above software in this distribution may have been modified by +// THL A29 Limited ("Tencent Modifications"). +// All Tencent Modifications are Copyright (C) 2015 THL A29 Limited. + +#ifndef _MSC_VER // [ +#error "Use this header only with Microsoft Visual C++ compilers!" 
+#endif // _MSC_VER ] + +#ifndef _MSC_INTTYPES_H_ // [ +#define _MSC_INTTYPES_H_ + +#if _MSC_VER > 1000 +#pragma once +#endif + +#include "stdint.h" + +// miloyip: VC supports inttypes.h since VC2013 +#if _MSC_VER >= 1800 +#include +#else + +// 7.8 Format conversion of integer types + +typedef struct { + intmax_t quot; + intmax_t rem; +} imaxdiv_t; + +// 7.8.1 Macros for format specifiers + +#if !defined(__cplusplus) || defined(__STDC_FORMAT_MACROS) // [ See footnote 185 at page 198 + +// The fprintf macros for signed integers are: +#define PRId8 "d" +#define PRIi8 "i" +#define PRIdLEAST8 "d" +#define PRIiLEAST8 "i" +#define PRIdFAST8 "d" +#define PRIiFAST8 "i" + +#define PRId16 "hd" +#define PRIi16 "hi" +#define PRIdLEAST16 "hd" +#define PRIiLEAST16 "hi" +#define PRIdFAST16 "hd" +#define PRIiFAST16 "hi" + +#define PRId32 "I32d" +#define PRIi32 "I32i" +#define PRIdLEAST32 "I32d" +#define PRIiLEAST32 "I32i" +#define PRIdFAST32 "I32d" +#define PRIiFAST32 "I32i" + +#define PRId64 "I64d" +#define PRIi64 "I64i" +#define PRIdLEAST64 "I64d" +#define PRIiLEAST64 "I64i" +#define PRIdFAST64 "I64d" +#define PRIiFAST64 "I64i" + +#define PRIdMAX "I64d" +#define PRIiMAX "I64i" + +#define PRIdPTR "Id" +#define PRIiPTR "Ii" + +// The fprintf macros for unsigned integers are: +#define PRIo8 "o" +#define PRIu8 "u" +#define PRIx8 "x" +#define PRIX8 "X" +#define PRIoLEAST8 "o" +#define PRIuLEAST8 "u" +#define PRIxLEAST8 "x" +#define PRIXLEAST8 "X" +#define PRIoFAST8 "o" +#define PRIuFAST8 "u" +#define PRIxFAST8 "x" +#define PRIXFAST8 "X" + +#define PRIo16 "ho" +#define PRIu16 "hu" +#define PRIx16 "hx" +#define PRIX16 "hX" +#define PRIoLEAST16 "ho" +#define PRIuLEAST16 "hu" +#define PRIxLEAST16 "hx" +#define PRIXLEAST16 "hX" +#define PRIoFAST16 "ho" +#define PRIuFAST16 "hu" +#define PRIxFAST16 "hx" +#define PRIXFAST16 "hX" + +#define PRIo32 "I32o" +#define PRIu32 "I32u" +#define PRIx32 "I32x" +#define PRIX32 "I32X" +#define PRIoLEAST32 "I32o" +#define PRIuLEAST32 "I32u" +#define PRIxLEAST32 "I32x" +#define PRIXLEAST32 "I32X" +#define PRIoFAST32 "I32o" +#define PRIuFAST32 "I32u" +#define PRIxFAST32 "I32x" +#define PRIXFAST32 "I32X" + +#define PRIo64 "I64o" +#define PRIu64 "I64u" +#define PRIx64 "I64x" +#define PRIX64 "I64X" +#define PRIoLEAST64 "I64o" +#define PRIuLEAST64 "I64u" +#define PRIxLEAST64 "I64x" +#define PRIXLEAST64 "I64X" +#define PRIoFAST64 "I64o" +#define PRIuFAST64 "I64u" +#define PRIxFAST64 "I64x" +#define PRIXFAST64 "I64X" + +#define PRIoMAX "I64o" +#define PRIuMAX "I64u" +#define PRIxMAX "I64x" +#define PRIXMAX "I64X" + +#define PRIoPTR "Io" +#define PRIuPTR "Iu" +#define PRIxPTR "Ix" +#define PRIXPTR "IX" + +// The fscanf macros for signed integers are: +#define SCNd8 "d" +#define SCNi8 "i" +#define SCNdLEAST8 "d" +#define SCNiLEAST8 "i" +#define SCNdFAST8 "d" +#define SCNiFAST8 "i" + +#define SCNd16 "hd" +#define SCNi16 "hi" +#define SCNdLEAST16 "hd" +#define SCNiLEAST16 "hi" +#define SCNdFAST16 "hd" +#define SCNiFAST16 "hi" + +#define SCNd32 "ld" +#define SCNi32 "li" +#define SCNdLEAST32 "ld" +#define SCNiLEAST32 "li" +#define SCNdFAST32 "ld" +#define SCNiFAST32 "li" + +#define SCNd64 "I64d" +#define SCNi64 "I64i" +#define SCNdLEAST64 "I64d" +#define SCNiLEAST64 "I64i" +#define SCNdFAST64 "I64d" +#define SCNiFAST64 "I64i" + +#define SCNdMAX "I64d" +#define SCNiMAX "I64i" + +#ifdef _WIN64 // [ +# define SCNdPTR "I64d" +# define SCNiPTR "I64i" +#else // _WIN64 ][ +# define SCNdPTR "ld" +# define SCNiPTR "li" +#endif // _WIN64 ] + +// The fscanf macros for unsigned integers are: +#define SCNo8 
"o" +#define SCNu8 "u" +#define SCNx8 "x" +#define SCNX8 "X" +#define SCNoLEAST8 "o" +#define SCNuLEAST8 "u" +#define SCNxLEAST8 "x" +#define SCNXLEAST8 "X" +#define SCNoFAST8 "o" +#define SCNuFAST8 "u" +#define SCNxFAST8 "x" +#define SCNXFAST8 "X" + +#define SCNo16 "ho" +#define SCNu16 "hu" +#define SCNx16 "hx" +#define SCNX16 "hX" +#define SCNoLEAST16 "ho" +#define SCNuLEAST16 "hu" +#define SCNxLEAST16 "hx" +#define SCNXLEAST16 "hX" +#define SCNoFAST16 "ho" +#define SCNuFAST16 "hu" +#define SCNxFAST16 "hx" +#define SCNXFAST16 "hX" + +#define SCNo32 "lo" +#define SCNu32 "lu" +#define SCNx32 "lx" +#define SCNX32 "lX" +#define SCNoLEAST32 "lo" +#define SCNuLEAST32 "lu" +#define SCNxLEAST32 "lx" +#define SCNXLEAST32 "lX" +#define SCNoFAST32 "lo" +#define SCNuFAST32 "lu" +#define SCNxFAST32 "lx" +#define SCNXFAST32 "lX" + +#define SCNo64 "I64o" +#define SCNu64 "I64u" +#define SCNx64 "I64x" +#define SCNX64 "I64X" +#define SCNoLEAST64 "I64o" +#define SCNuLEAST64 "I64u" +#define SCNxLEAST64 "I64x" +#define SCNXLEAST64 "I64X" +#define SCNoFAST64 "I64o" +#define SCNuFAST64 "I64u" +#define SCNxFAST64 "I64x" +#define SCNXFAST64 "I64X" + +#define SCNoMAX "I64o" +#define SCNuMAX "I64u" +#define SCNxMAX "I64x" +#define SCNXMAX "I64X" + +#ifdef _WIN64 // [ +# define SCNoPTR "I64o" +# define SCNuPTR "I64u" +# define SCNxPTR "I64x" +# define SCNXPTR "I64X" +#else // _WIN64 ][ +# define SCNoPTR "lo" +# define SCNuPTR "lu" +# define SCNxPTR "lx" +# define SCNXPTR "lX" +#endif // _WIN64 ] + +#endif // __STDC_FORMAT_MACROS ] + +// 7.8.2 Functions for greatest-width integer types + +// 7.8.2.1 The imaxabs function +#define imaxabs _abs64 + +// 7.8.2.2 The imaxdiv function + +// This is modified version of div() function from Microsoft's div.c found +// in %MSVC.NET%\crt\src\div.c +#ifdef STATIC_IMAXDIV // [ +static +#else // STATIC_IMAXDIV ][ +_inline +#endif // STATIC_IMAXDIV ] +imaxdiv_t __cdecl imaxdiv(intmax_t numer, intmax_t denom) +{ + imaxdiv_t result; + + result.quot = numer / denom; + result.rem = numer % denom; + + if (numer < 0 && result.rem > 0) { + // did division wrong; must fix up + ++result.quot; + result.rem -= denom; + } + + return result; +} + +// 7.8.2.3 The strtoimax and strtoumax functions +#define strtoimax _strtoi64 +#define strtoumax _strtoui64 + +// 7.8.2.4 The wcstoimax and wcstoumax functions +#define wcstoimax _wcstoi64 +#define wcstoumax _wcstoui64 + +#endif // _MSC_VER >= 1800 + +#endif // _MSC_INTTYPES_H_ ] diff --git a/include/rapidjson/msinttypes/stdint.h b/include/rapidjson/msinttypes/stdint.h new file mode 100644 index 0000000..3d4477b --- /dev/null +++ b/include/rapidjson/msinttypes/stdint.h @@ -0,0 +1,300 @@ +// ISO C9x compliant stdint.h for Microsoft Visual Studio +// Based on ISO/IEC 9899:TC2 Committee draft (May 6, 2005) WG14/N1124 +// +// Copyright (c) 2006-2013 Alexander Chemeris +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// +// 3. 
Neither the name of the product nor the names of its contributors may +// be used to endorse or promote products derived from this software +// without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +// OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +/////////////////////////////////////////////////////////////////////////////// + +// The above software in this distribution may have been modified by +// THL A29 Limited ("Tencent Modifications"). +// All Tencent Modifications are Copyright (C) 2015 THL A29 Limited. + +#ifndef _MSC_VER // [ +#error "Use this header only with Microsoft Visual C++ compilers!" +#endif // _MSC_VER ] + +#ifndef _MSC_STDINT_H_ // [ +#define _MSC_STDINT_H_ + +#if _MSC_VER > 1000 +#pragma once +#endif + +// miloyip: Originally Visual Studio 2010 uses its own stdint.h. However it generates warning with INT64_C(), so change to use this file for vs2010. +#if _MSC_VER >= 1600 // [ +#include + +#if !defined(__cplusplus) || defined(__STDC_CONSTANT_MACROS) // [ See footnote 224 at page 260 + +#undef INT8_C +#undef INT16_C +#undef INT32_C +#undef INT64_C +#undef UINT8_C +#undef UINT16_C +#undef UINT32_C +#undef UINT64_C + +// 7.18.4.1 Macros for minimum-width integer constants + +#define INT8_C(val) val##i8 +#define INT16_C(val) val##i16 +#define INT32_C(val) val##i32 +#define INT64_C(val) val##i64 + +#define UINT8_C(val) val##ui8 +#define UINT16_C(val) val##ui16 +#define UINT32_C(val) val##ui32 +#define UINT64_C(val) val##ui64 + +// 7.18.4.2 Macros for greatest-width integer constants +// These #ifndef's are needed to prevent collisions with . +// Check out Issue 9 for the details. +#ifndef INTMAX_C // [ +# define INTMAX_C INT64_C +#endif // INTMAX_C ] +#ifndef UINTMAX_C // [ +# define UINTMAX_C UINT64_C +#endif // UINTMAX_C ] + +#endif // __STDC_CONSTANT_MACROS ] + +#else // ] _MSC_VER >= 1700 [ + +#include + +// For Visual Studio 6 in C++ mode and for many Visual Studio versions when +// compiling for ARM we have to wrap include with 'extern "C++" {}' +// or compiler would give many errors like this: +// error C2733: second C linkage of overloaded function 'wmemchr' not allowed +#if defined(__cplusplus) && !defined(_M_ARM) +extern "C" { +#endif +# include +#if defined(__cplusplus) && !defined(_M_ARM) +} +#endif + +// Define _W64 macros to mark types changing their size, like intptr_t. +#ifndef _W64 +# if !defined(__midl) && (defined(_X86_) || defined(_M_IX86)) && _MSC_VER >= 1300 +# define _W64 __w64 +# else +# define _W64 +# endif +#endif + + +// 7.18.1 Integer types + +// 7.18.1.1 Exact-width integer types + +// Visual Studio 6 and Embedded Visual C++ 4 doesn't +// realize that, e.g. char has the same size as __int8 +// so we give up on __intX for them. 
+#if (_MSC_VER < 1300) + typedef signed char int8_t; + typedef signed short int16_t; + typedef signed int int32_t; + typedef unsigned char uint8_t; + typedef unsigned short uint16_t; + typedef unsigned int uint32_t; +#else + typedef signed __int8 int8_t; + typedef signed __int16 int16_t; + typedef signed __int32 int32_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; +#endif +typedef signed __int64 int64_t; +typedef unsigned __int64 uint64_t; + + +// 7.18.1.2 Minimum-width integer types +typedef int8_t int_least8_t; +typedef int16_t int_least16_t; +typedef int32_t int_least32_t; +typedef int64_t int_least64_t; +typedef uint8_t uint_least8_t; +typedef uint16_t uint_least16_t; +typedef uint32_t uint_least32_t; +typedef uint64_t uint_least64_t; + +// 7.18.1.3 Fastest minimum-width integer types +typedef int8_t int_fast8_t; +typedef int16_t int_fast16_t; +typedef int32_t int_fast32_t; +typedef int64_t int_fast64_t; +typedef uint8_t uint_fast8_t; +typedef uint16_t uint_fast16_t; +typedef uint32_t uint_fast32_t; +typedef uint64_t uint_fast64_t; + +// 7.18.1.4 Integer types capable of holding object pointers +#ifdef _WIN64 // [ + typedef signed __int64 intptr_t; + typedef unsigned __int64 uintptr_t; +#else // _WIN64 ][ + typedef _W64 signed int intptr_t; + typedef _W64 unsigned int uintptr_t; +#endif // _WIN64 ] + +// 7.18.1.5 Greatest-width integer types +typedef int64_t intmax_t; +typedef uint64_t uintmax_t; + + +// 7.18.2 Limits of specified-width integer types + +#if !defined(__cplusplus) || defined(__STDC_LIMIT_MACROS) // [ See footnote 220 at page 257 and footnote 221 at page 259 + +// 7.18.2.1 Limits of exact-width integer types +#define INT8_MIN ((int8_t)_I8_MIN) +#define INT8_MAX _I8_MAX +#define INT16_MIN ((int16_t)_I16_MIN) +#define INT16_MAX _I16_MAX +#define INT32_MIN ((int32_t)_I32_MIN) +#define INT32_MAX _I32_MAX +#define INT64_MIN ((int64_t)_I64_MIN) +#define INT64_MAX _I64_MAX +#define UINT8_MAX _UI8_MAX +#define UINT16_MAX _UI16_MAX +#define UINT32_MAX _UI32_MAX +#define UINT64_MAX _UI64_MAX + +// 7.18.2.2 Limits of minimum-width integer types +#define INT_LEAST8_MIN INT8_MIN +#define INT_LEAST8_MAX INT8_MAX +#define INT_LEAST16_MIN INT16_MIN +#define INT_LEAST16_MAX INT16_MAX +#define INT_LEAST32_MIN INT32_MIN +#define INT_LEAST32_MAX INT32_MAX +#define INT_LEAST64_MIN INT64_MIN +#define INT_LEAST64_MAX INT64_MAX +#define UINT_LEAST8_MAX UINT8_MAX +#define UINT_LEAST16_MAX UINT16_MAX +#define UINT_LEAST32_MAX UINT32_MAX +#define UINT_LEAST64_MAX UINT64_MAX + +// 7.18.2.3 Limits of fastest minimum-width integer types +#define INT_FAST8_MIN INT8_MIN +#define INT_FAST8_MAX INT8_MAX +#define INT_FAST16_MIN INT16_MIN +#define INT_FAST16_MAX INT16_MAX +#define INT_FAST32_MIN INT32_MIN +#define INT_FAST32_MAX INT32_MAX +#define INT_FAST64_MIN INT64_MIN +#define INT_FAST64_MAX INT64_MAX +#define UINT_FAST8_MAX UINT8_MAX +#define UINT_FAST16_MAX UINT16_MAX +#define UINT_FAST32_MAX UINT32_MAX +#define UINT_FAST64_MAX UINT64_MAX + +// 7.18.2.4 Limits of integer types capable of holding object pointers +#ifdef _WIN64 // [ +# define INTPTR_MIN INT64_MIN +# define INTPTR_MAX INT64_MAX +# define UINTPTR_MAX UINT64_MAX +#else // _WIN64 ][ +# define INTPTR_MIN INT32_MIN +# define INTPTR_MAX INT32_MAX +# define UINTPTR_MAX UINT32_MAX +#endif // _WIN64 ] + +// 7.18.2.5 Limits of greatest-width integer types +#define INTMAX_MIN INT64_MIN +#define INTMAX_MAX INT64_MAX +#define UINTMAX_MAX UINT64_MAX + +// 7.18.3 Limits of other integer 
types + +#ifdef _WIN64 // [ +# define PTRDIFF_MIN _I64_MIN +# define PTRDIFF_MAX _I64_MAX +#else // _WIN64 ][ +# define PTRDIFF_MIN _I32_MIN +# define PTRDIFF_MAX _I32_MAX +#endif // _WIN64 ] + +#define SIG_ATOMIC_MIN INT_MIN +#define SIG_ATOMIC_MAX INT_MAX + +#ifndef SIZE_MAX // [ +# ifdef _WIN64 // [ +# define SIZE_MAX _UI64_MAX +# else // _WIN64 ][ +# define SIZE_MAX _UI32_MAX +# endif // _WIN64 ] +#endif // SIZE_MAX ] + +// WCHAR_MIN and WCHAR_MAX are also defined in +#ifndef WCHAR_MIN // [ +# define WCHAR_MIN 0 +#endif // WCHAR_MIN ] +#ifndef WCHAR_MAX // [ +# define WCHAR_MAX _UI16_MAX +#endif // WCHAR_MAX ] + +#define WINT_MIN 0 +#define WINT_MAX _UI16_MAX + +#endif // __STDC_LIMIT_MACROS ] + + +// 7.18.4 Limits of other integer types + +#if !defined(__cplusplus) || defined(__STDC_CONSTANT_MACROS) // [ See footnote 224 at page 260 + +// 7.18.4.1 Macros for minimum-width integer constants + +#define INT8_C(val) val##i8 +#define INT16_C(val) val##i16 +#define INT32_C(val) val##i32 +#define INT64_C(val) val##i64 + +#define UINT8_C(val) val##ui8 +#define UINT16_C(val) val##ui16 +#define UINT32_C(val) val##ui32 +#define UINT64_C(val) val##ui64 + +// 7.18.4.2 Macros for greatest-width integer constants +// These #ifndef's are needed to prevent collisions with . +// Check out Issue 9 for the details. +#ifndef INTMAX_C // [ +# define INTMAX_C INT64_C +#endif // INTMAX_C ] +#ifndef UINTMAX_C // [ +# define UINTMAX_C UINT64_C +#endif // UINTMAX_C ] + +#endif // __STDC_CONSTANT_MACROS ] + +#endif // _MSC_VER >= 1600 ] + +#endif // _MSC_STDINT_H_ ] diff --git a/include/rapidjson/ostreamwrapper.h b/include/rapidjson/ostreamwrapper.h new file mode 100644 index 0000000..6f4667c --- /dev/null +++ b/include/rapidjson/ostreamwrapper.h @@ -0,0 +1,81 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_OSTREAMWRAPPER_H_ +#define RAPIDJSON_OSTREAMWRAPPER_H_ + +#include "stream.h" +#include + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(padded) +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +//! Wrapper of \c std::basic_ostream into RapidJSON's Stream concept. +/*! + The classes can be wrapped including but not limited to: + + - \c std::ostringstream + - \c std::stringstream + - \c std::wpstringstream + - \c std::wstringstream + - \c std::ifstream + - \c std::fstream + - \c std::wofstream + - \c std::wfstream + + \tparam StreamType Class derived from \c std::basic_ostream. 
+*/ + +template +class BasicOStreamWrapper { +public: + typedef typename StreamType::char_type Ch; + BasicOStreamWrapper(StreamType& stream) : stream_(stream) {} + + void Put(Ch c) { + stream_.put(c); + } + + void Flush() { + stream_.flush(); + } + + // Not implemented + char Peek() const { RAPIDJSON_ASSERT(false); return 0; } + char Take() { RAPIDJSON_ASSERT(false); return 0; } + size_t Tell() const { RAPIDJSON_ASSERT(false); return 0; } + char* PutBegin() { RAPIDJSON_ASSERT(false); return 0; } + size_t PutEnd(char*) { RAPIDJSON_ASSERT(false); return 0; } + +private: + BasicOStreamWrapper(const BasicOStreamWrapper&); + BasicOStreamWrapper& operator=(const BasicOStreamWrapper&); + + StreamType& stream_; +}; + +typedef BasicOStreamWrapper OStreamWrapper; +typedef BasicOStreamWrapper WOStreamWrapper; + +#ifdef __clang__ +RAPIDJSON_DIAG_POP +#endif + +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_OSTREAMWRAPPER_H_ diff --git a/include/rapidjson/pointer.h b/include/rapidjson/pointer.h new file mode 100644 index 0000000..0206ac1 --- /dev/null +++ b/include/rapidjson/pointer.h @@ -0,0 +1,1358 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_POINTER_H_ +#define RAPIDJSON_POINTER_H_ + +#include "document.h" +#include "internal/itoa.h" + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(switch-enum) +#endif + +#ifdef _MSC_VER +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(4512) // assignment operator could not be generated +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +static const SizeType kPointerInvalidIndex = ~SizeType(0); //!< Represents an invalid index in GenericPointer::Token + +//! Error code of parsing. +/*! \ingroup RAPIDJSON_ERRORS + \see GenericPointer::GenericPointer, GenericPointer::GetParseErrorCode +*/ +enum PointerParseErrorCode { + kPointerParseErrorNone = 0, //!< The parse is successful + + kPointerParseErrorTokenMustBeginWithSolidus, //!< A token must begin with a '/' + kPointerParseErrorInvalidEscape, //!< Invalid escape + kPointerParseErrorInvalidPercentEncoding, //!< Invalid percent encoding in URI fragment + kPointerParseErrorCharacterMustPercentEncode //!< A character must percent encoded in URI fragment +}; + +/////////////////////////////////////////////////////////////////////////////// +// GenericPointer + +//! Represents a JSON Pointer. Use Pointer for UTF8 encoding and default allocator. +/*! + This class implements RFC 6901 "JavaScript Object Notation (JSON) Pointer" + (https://tools.ietf.org/html/rfc6901). + + A JSON pointer is for identifying a specific value in a JSON document + (GenericDocument). It can simplify coding of DOM tree manipulation, because it + can access multiple-level depth of DOM tree with single API call. + + After it parses a string representation (e.g. "/foo/0" or URI fragment + representation (e.g. 
"#/foo/0") into its internal representation (tokens), + it can be used to resolve a specific value in multiple documents, or sub-tree + of documents. + + Contrary to GenericValue, Pointer can be copy constructed and copy assigned. + Apart from assignment, a Pointer cannot be modified after construction. + + Although Pointer is very convenient, please aware that constructing Pointer + involves parsing and dynamic memory allocation. A special constructor with user- + supplied tokens eliminates these. + + GenericPointer depends on GenericDocument and GenericValue. + + \tparam ValueType The value type of the DOM tree. E.g. GenericValue > + \tparam Allocator The allocator type for allocating memory for internal representation. + + \note GenericPointer uses same encoding of ValueType. + However, Allocator of GenericPointer is independent of Allocator of Value. +*/ +template +class GenericPointer { +public: + typedef typename ValueType::EncodingType EncodingType; //!< Encoding type from Value + typedef typename ValueType::Ch Ch; //!< Character type from Value + + //! A token is the basic units of internal representation. + /*! + A JSON pointer string representation "/foo/123" is parsed to two tokens: + "foo" and 123. 123 will be represented in both numeric form and string form. + They are resolved according to the actual value type (object or array). + + For token that are not numbers, or the numeric value is out of bound + (greater than limits of SizeType), they are only treated as string form + (i.e. the token's index will be equal to kPointerInvalidIndex). + + This struct is public so that user can create a Pointer without parsing and + allocation, using a special constructor. + */ + struct Token { + const Ch* name; //!< Name of the token. It has null character at the end but it can contain null character. + SizeType length; //!< Length of the name. + SizeType index; //!< A valid array index, if it is not equal to kPointerInvalidIndex. + }; + + //!@name Constructors and destructor. + //@{ + + //! Default constructor. + GenericPointer(Allocator* allocator = 0) : allocator_(allocator), ownAllocator_(), nameBuffer_(), tokens_(), tokenCount_(), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) {} + + //! Constructor that parses a string or URI fragment representation. + /*! + \param source A null-terminated, string or URI fragment representation of JSON pointer. + \param allocator User supplied allocator for this pointer. If no allocator is provided, it creates a self-owned one. + */ + explicit GenericPointer(const Ch* source, Allocator* allocator = 0) : allocator_(allocator), ownAllocator_(), nameBuffer_(), tokens_(), tokenCount_(), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) { + Parse(source, internal::StrLen(source)); + } + +#if RAPIDJSON_HAS_STDSTRING + //! Constructor that parses a string or URI fragment representation. + /*! + \param source A string or URI fragment representation of JSON pointer. + \param allocator User supplied allocator for this pointer. If no allocator is provided, it creates a self-owned one. + \note Requires the definition of the preprocessor symbol \ref RAPIDJSON_HAS_STDSTRING. + */ + explicit GenericPointer(const std::basic_string& source, Allocator* allocator = 0) : allocator_(allocator), ownAllocator_(), nameBuffer_(), tokens_(), tokenCount_(), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) { + Parse(source.c_str(), source.size()); + } +#endif + + //! 
Constructor that parses a string or URI fragment representation, with length of the source string. + /*! + \param source A string or URI fragment representation of JSON pointer. + \param length Length of source. + \param allocator User supplied allocator for this pointer. If no allocator is provided, it creates a self-owned one. + \note Slightly faster than the overload without length. + */ + GenericPointer(const Ch* source, size_t length, Allocator* allocator = 0) : allocator_(allocator), ownAllocator_(), nameBuffer_(), tokens_(), tokenCount_(), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) { + Parse(source, length); + } + + //! Constructor with user-supplied tokens. + /*! + This constructor let user supplies const array of tokens. + This prevents the parsing process and eliminates allocation. + This is preferred for memory constrained environments. + + \param tokens An constant array of tokens representing the JSON pointer. + \param tokenCount Number of tokens. + + \b Example + \code + #define NAME(s) { s, sizeof(s) / sizeof(s[0]) - 1, kPointerInvalidIndex } + #define INDEX(i) { #i, sizeof(#i) - 1, i } + + static const Pointer::Token kTokens[] = { NAME("foo"), INDEX(123) }; + static const Pointer p(kTokens, sizeof(kTokens) / sizeof(kTokens[0])); + // Equivalent to static const Pointer p("/foo/123"); + + #undef NAME + #undef INDEX + \endcode + */ + GenericPointer(const Token* tokens, size_t tokenCount) : allocator_(), ownAllocator_(), nameBuffer_(), tokens_(const_cast(tokens)), tokenCount_(tokenCount), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) {} + + //! Copy constructor. + GenericPointer(const GenericPointer& rhs, Allocator* allocator = 0) : allocator_(allocator), ownAllocator_(), nameBuffer_(), tokens_(), tokenCount_(), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) { + *this = rhs; + } + + //! Destructor. + ~GenericPointer() { + if (nameBuffer_) // If user-supplied tokens constructor is used, nameBuffer_ is nullptr and tokens_ are not deallocated. + Allocator::Free(tokens_); + RAPIDJSON_DELETE(ownAllocator_); + } + + //! Assignment operator. + GenericPointer& operator=(const GenericPointer& rhs) { + if (this != &rhs) { + // Do not delete ownAllcator + if (nameBuffer_) + Allocator::Free(tokens_); + + tokenCount_ = rhs.tokenCount_; + parseErrorOffset_ = rhs.parseErrorOffset_; + parseErrorCode_ = rhs.parseErrorCode_; + + if (rhs.nameBuffer_) + CopyFromRaw(rhs); // Normally parsed tokens. + else { + tokens_ = rhs.tokens_; // User supplied const tokens. + nameBuffer_ = 0; + } + } + return *this; + } + + //@} + + //!@name Append token + //@{ + + //! Append a token and return a new Pointer + /*! + \param token Token to be appended. + \param allocator Allocator for the newly return Pointer. + \return A new Pointer with appended token. + */ + GenericPointer Append(const Token& token, Allocator* allocator = 0) const { + GenericPointer r; + r.allocator_ = allocator; + Ch *p = r.CopyFromRaw(*this, 1, token.length + 1); + std::memcpy(p, token.name, (token.length + 1) * sizeof(Ch)); + r.tokens_[tokenCount_].name = p; + r.tokens_[tokenCount_].length = token.length; + r.tokens_[tokenCount_].index = token.index; + return r; + } + + //! Append a name token with length, and return a new Pointer + /*! + \param name Name to be appended. + \param length Length of name. + \param allocator Allocator for the newly return Pointer. + \return A new Pointer with appended token. 
+ */ + GenericPointer Append(const Ch* name, SizeType length, Allocator* allocator = 0) const { + Token token = { name, length, kPointerInvalidIndex }; + return Append(token, allocator); + } + + //! Append a name token without length, and return a new Pointer + /*! + \param name Name (const Ch*) to be appended. + \param allocator Allocator for the newly return Pointer. + \return A new Pointer with appended token. + */ + template + RAPIDJSON_DISABLEIF_RETURN((internal::NotExpr::Type, Ch> >), (GenericPointer)) + Append(T* name, Allocator* allocator = 0) const { + return Append(name, StrLen(name), allocator); + } + +#if RAPIDJSON_HAS_STDSTRING + //! Append a name token, and return a new Pointer + /*! + \param name Name to be appended. + \param allocator Allocator for the newly return Pointer. + \return A new Pointer with appended token. + */ + GenericPointer Append(const std::basic_string& name, Allocator* allocator = 0) const { + return Append(name.c_str(), static_cast(name.size()), allocator); + } +#endif + + //! Append a index token, and return a new Pointer + /*! + \param index Index to be appended. + \param allocator Allocator for the newly return Pointer. + \return A new Pointer with appended token. + */ + GenericPointer Append(SizeType index, Allocator* allocator = 0) const { + char buffer[21]; + char* end = sizeof(SizeType) == 4 ? internal::u32toa(index, buffer) : internal::u64toa(index, buffer); + SizeType length = static_cast(end - buffer); + buffer[length] = '\0'; + + if (sizeof(Ch) == 1) { + Token token = { reinterpret_cast(buffer), length, index }; + return Append(token, allocator); + } + else { + Ch name[21]; + for (size_t i = 0; i <= length; i++) + name[i] = buffer[i]; + Token token = { name, length, index }; + return Append(token, allocator); + } + } + + //! Append a token by value, and return a new Pointer + /*! + \param token token to be appended. + \param allocator Allocator for the newly return Pointer. + \return A new Pointer with appended token. + */ + GenericPointer Append(const ValueType& token, Allocator* allocator = 0) const { + if (token.IsString()) + return Append(token.GetString(), token.GetStringLength(), allocator); + else { + RAPIDJSON_ASSERT(token.IsUint64()); + RAPIDJSON_ASSERT(token.GetUint64() <= SizeType(~0)); + return Append(static_cast(token.GetUint64()), allocator); + } + } + + //!@name Handling Parse Error + //@{ + + //! Check whether this is a valid pointer. + bool IsValid() const { return parseErrorCode_ == kPointerParseErrorNone; } + + //! Get the parsing error offset in code unit. + size_t GetParseErrorOffset() const { return parseErrorOffset_; } + + //! Get the parsing error code. + PointerParseErrorCode GetParseErrorCode() const { return parseErrorCode_; } + + //@} + + //! Get the allocator of this pointer. + Allocator& GetAllocator() { return *allocator_; } + + //!@name Tokens + //@{ + + //! Get the token array (const version only). + const Token* GetTokens() const { return tokens_; } + + //! Get the number of tokens. + size_t GetTokenCount() const { return tokenCount_; } + + //@} + + //!@name Equality/inequality operators + //@{ + + //! Equality operator. + /*! + \note When any pointers are invalid, always returns false. 
+ */ + bool operator==(const GenericPointer& rhs) const { + if (!IsValid() || !rhs.IsValid() || tokenCount_ != rhs.tokenCount_) + return false; + + for (size_t i = 0; i < tokenCount_; i++) { + if (tokens_[i].index != rhs.tokens_[i].index || + tokens_[i].length != rhs.tokens_[i].length || + (tokens_[i].length != 0 && std::memcmp(tokens_[i].name, rhs.tokens_[i].name, sizeof(Ch)* tokens_[i].length) != 0)) + { + return false; + } + } + + return true; + } + + //! Inequality operator. + /*! + \note When any pointers are invalid, always returns true. + */ + bool operator!=(const GenericPointer& rhs) const { return !(*this == rhs); } + + //@} + + //!@name Stringify + //@{ + + //! Stringify the pointer into string representation. + /*! + \tparam OutputStream Type of output stream. + \param os The output stream. + */ + template + bool Stringify(OutputStream& os) const { + return Stringify(os); + } + + //! Stringify the pointer into URI fragment representation. + /*! + \tparam OutputStream Type of output stream. + \param os The output stream. + */ + template + bool StringifyUriFragment(OutputStream& os) const { + return Stringify(os); + } + + //@} + + //!@name Create value + //@{ + + //! Create a value in a subtree. + /*! + If the value is not exist, it creates all parent values and a JSON Null value. + So it always succeed and return the newly created or existing value. + + Remind that it may change types of parents according to tokens, so it + potentially removes previously stored values. For example, if a document + was an array, and "/foo" is used to create a value, then the document + will be changed to an object, and all existing array elements are lost. + + \param root Root value of a DOM subtree to be resolved. It can be any value other than document root. + \param allocator Allocator for creating the values if the specified value or its parents are not exist. + \param alreadyExist If non-null, it stores whether the resolved value is already exist. + \return The resolved newly created (a JSON Null value), or already exists value. + */ + ValueType& Create(ValueType& root, typename ValueType::AllocatorType& allocator, bool* alreadyExist = 0) const { + RAPIDJSON_ASSERT(IsValid()); + ValueType* v = &root; + bool exist = true; + for (const Token *t = tokens_; t != tokens_ + tokenCount_; ++t) { + if (v->IsArray() && t->name[0] == '-' && t->length == 1) { + v->PushBack(ValueType().Move(), allocator); + v = &((*v)[v->Size() - 1]); + exist = false; + } + else { + if (t->index == kPointerInvalidIndex) { // must be object name + if (!v->IsObject()) + v->SetObject(); // Change to Object + } + else { // object name or array index + if (!v->IsArray() && !v->IsObject()) + v->SetArray(); // Change to Array + } + + if (v->IsArray()) { + if (t->index >= v->Size()) { + v->Reserve(t->index + 1, allocator); + while (t->index >= v->Size()) + v->PushBack(ValueType().Move(), allocator); + exist = false; + } + v = &((*v)[t->index]); + } + else { + typename ValueType::MemberIterator m = v->FindMember(GenericStringRef(t->name, t->length)); + if (m == v->MemberEnd()) { + v->AddMember(ValueType(t->name, t->length, allocator).Move(), ValueType().Move(), allocator); + v = &(--v->MemberEnd())->value; // Assumes AddMember() appends at the end + exist = false; + } + else + v = &m->value; + } + } + } + + if (alreadyExist) + *alreadyExist = exist; + + return *v; + } + + //! Creates a value in a document. + /*! + \param document A document to be resolved. 
+ \param alreadyExist If non-null, it stores whether the resolved value is already exist. + \return The resolved newly created, or already exists value. + */ + template + ValueType& Create(GenericDocument& document, bool* alreadyExist = 0) const { + return Create(document, document.GetAllocator(), alreadyExist); + } + + //@} + + //!@name Query value + //@{ + + //! Query a value in a subtree. + /*! + \param root Root value of a DOM sub-tree to be resolved. It can be any value other than document root. + \param unresolvedTokenIndex If the pointer cannot resolve a token in the pointer, this parameter can obtain the index of unresolved token. + \return Pointer to the value if it can be resolved. Otherwise null. + + \note + There are only 3 situations when a value cannot be resolved: + 1. A value in the path is not an array nor object. + 2. An object value does not contain the token. + 3. A token is out of range of an array value. + + Use unresolvedTokenIndex to retrieve the token index. + */ + ValueType* Get(ValueType& root, size_t* unresolvedTokenIndex = 0) const { + RAPIDJSON_ASSERT(IsValid()); + ValueType* v = &root; + for (const Token *t = tokens_; t != tokens_ + tokenCount_; ++t) { + switch (v->GetType()) { + case kObjectType: + { + typename ValueType::MemberIterator m = v->FindMember(GenericStringRef(t->name, t->length)); + if (m == v->MemberEnd()) + break; + v = &m->value; + } + continue; + case kArrayType: + if (t->index == kPointerInvalidIndex || t->index >= v->Size()) + break; + v = &((*v)[t->index]); + continue; + default: + break; + } + + // Error: unresolved token + if (unresolvedTokenIndex) + *unresolvedTokenIndex = static_cast(t - tokens_); + return 0; + } + return v; + } + + //! Query a const value in a const subtree. + /*! + \param root Root value of a DOM sub-tree to be resolved. It can be any value other than document root. + \return Pointer to the value if it can be resolved. Otherwise null. + */ + const ValueType* Get(const ValueType& root, size_t* unresolvedTokenIndex = 0) const { + return Get(const_cast(root), unresolvedTokenIndex); + } + + //@} + + //!@name Query a value with default + //@{ + + //! Query a value in a subtree with default value. + /*! + Similar to Get(), but if the specified value do not exists, it creates all parents and clone the default value. + So that this function always succeed. + + \param root Root value of a DOM sub-tree to be resolved. It can be any value other than document root. + \param defaultValue Default value to be cloned if the value was not exists. + \param allocator Allocator for creating the values if the specified value or its parents are not exist. + \see Create() + */ + ValueType& GetWithDefault(ValueType& root, const ValueType& defaultValue, typename ValueType::AllocatorType& allocator) const { + bool alreadyExist; + Value& v = Create(root, allocator, &alreadyExist); + return alreadyExist ? v : v.CopyFrom(defaultValue, allocator); + } + + //! Query a value in a subtree with default null-terminated string. + ValueType& GetWithDefault(ValueType& root, const Ch* defaultValue, typename ValueType::AllocatorType& allocator) const { + bool alreadyExist; + Value& v = Create(root, allocator, &alreadyExist); + return alreadyExist ? v : v.SetString(defaultValue, allocator); + } + +#if RAPIDJSON_HAS_STDSTRING + //! Query a value in a subtree with default std::basic_string. 
+ ValueType& GetWithDefault(ValueType& root, const std::basic_string& defaultValue, typename ValueType::AllocatorType& allocator) const { + bool alreadyExist; + Value& v = Create(root, allocator, &alreadyExist); + return alreadyExist ? v : v.SetString(defaultValue, allocator); + } +#endif + + //! Query a value in a subtree with default primitive value. + /*! + \tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t, \c bool + */ + template + RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (ValueType&)) + GetWithDefault(ValueType& root, T defaultValue, typename ValueType::AllocatorType& allocator) const { + return GetWithDefault(root, ValueType(defaultValue).Move(), allocator); + } + + //! Query a value in a document with default value. + template + ValueType& GetWithDefault(GenericDocument& document, const ValueType& defaultValue) const { + return GetWithDefault(document, defaultValue, document.GetAllocator()); + } + + //! Query a value in a document with default null-terminated string. + template + ValueType& GetWithDefault(GenericDocument& document, const Ch* defaultValue) const { + return GetWithDefault(document, defaultValue, document.GetAllocator()); + } + +#if RAPIDJSON_HAS_STDSTRING + //! Query a value in a document with default std::basic_string. + template + ValueType& GetWithDefault(GenericDocument& document, const std::basic_string& defaultValue) const { + return GetWithDefault(document, defaultValue, document.GetAllocator()); + } +#endif + + //! Query a value in a document with default primitive value. + /*! + \tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t, \c bool + */ + template + RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (ValueType&)) + GetWithDefault(GenericDocument& document, T defaultValue) const { + return GetWithDefault(document, defaultValue, document.GetAllocator()); + } + + //@} + + //!@name Set a value + //@{ + + //! Set a value in a subtree, with move semantics. + /*! + It creates all parents if they are not exist or types are different to the tokens. + So this function always succeeds but potentially remove existing values. + + \param root Root value of a DOM sub-tree to be resolved. It can be any value other than document root. + \param value Value to be set. + \param allocator Allocator for creating the values if the specified value or its parents are not exist. + \see Create() + */ + ValueType& Set(ValueType& root, ValueType& value, typename ValueType::AllocatorType& allocator) const { + return Create(root, allocator) = value; + } + + //! Set a value in a subtree, with copy semantics. + ValueType& Set(ValueType& root, const ValueType& value, typename ValueType::AllocatorType& allocator) const { + return Create(root, allocator).CopyFrom(value, allocator); + } + + //! Set a null-terminated string in a subtree. + ValueType& Set(ValueType& root, const Ch* value, typename ValueType::AllocatorType& allocator) const { + return Create(root, allocator) = ValueType(value, allocator).Move(); + } + +#if RAPIDJSON_HAS_STDSTRING + //! Set a std::basic_string in a subtree. + ValueType& Set(ValueType& root, const std::basic_string& value, typename ValueType::AllocatorType& allocator) const { + return Create(root, allocator) = ValueType(value, allocator).Move(); + } +#endif + + //! Set a primitive value in a subtree. + /*! 
+ \tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t, \c bool + */ + template + RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (ValueType&)) + Set(ValueType& root, T value, typename ValueType::AllocatorType& allocator) const { + return Create(root, allocator) = ValueType(value).Move(); + } + + //! Set a value in a document, with move semantics. + template + ValueType& Set(GenericDocument& document, ValueType& value) const { + return Create(document) = value; + } + + //! Set a value in a document, with copy semantics. + template + ValueType& Set(GenericDocument& document, const ValueType& value) const { + return Create(document).CopyFrom(value, document.GetAllocator()); + } + + //! Set a null-terminated string in a document. + template + ValueType& Set(GenericDocument& document, const Ch* value) const { + return Create(document) = ValueType(value, document.GetAllocator()).Move(); + } + +#if RAPIDJSON_HAS_STDSTRING + //! Sets a std::basic_string in a document. + template + ValueType& Set(GenericDocument& document, const std::basic_string& value) const { + return Create(document) = ValueType(value, document.GetAllocator()).Move(); + } +#endif + + //! Set a primitive value in a document. + /*! + \tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t, \c bool + */ + template + RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (ValueType&)) + Set(GenericDocument& document, T value) const { + return Create(document) = value; + } + + //@} + + //!@name Swap a value + //@{ + + //! Swap a value with a value in a subtree. + /*! + It creates all parents if they are not exist or types are different to the tokens. + So this function always succeeds but potentially remove existing values. + + \param root Root value of a DOM sub-tree to be resolved. It can be any value other than document root. + \param value Value to be swapped. + \param allocator Allocator for creating the values if the specified value or its parents are not exist. + \see Create() + */ + ValueType& Swap(ValueType& root, ValueType& value, typename ValueType::AllocatorType& allocator) const { + return Create(root, allocator).Swap(value); + } + + //! Swap a value with a value in a document. + template + ValueType& Swap(GenericDocument& document, ValueType& value) const { + return Create(document).Swap(value); + } + + //@} + + //! Erase a value in a subtree. + /*! + \param root Root value of a DOM sub-tree to be resolved. It can be any value other than document root. + \return Whether the resolved value is found and erased. + + \note Erasing with an empty pointer \c Pointer(""), i.e. the root, always fail and return false. 
+ */ + bool Erase(ValueType& root) const { + RAPIDJSON_ASSERT(IsValid()); + if (tokenCount_ == 0) // Cannot erase the root + return false; + + ValueType* v = &root; + const Token* last = tokens_ + (tokenCount_ - 1); + for (const Token *t = tokens_; t != last; ++t) { + switch (v->GetType()) { + case kObjectType: + { + typename ValueType::MemberIterator m = v->FindMember(GenericStringRef(t->name, t->length)); + if (m == v->MemberEnd()) + return false; + v = &m->value; + } + break; + case kArrayType: + if (t->index == kPointerInvalidIndex || t->index >= v->Size()) + return false; + v = &((*v)[t->index]); + break; + default: + return false; + } + } + + switch (v->GetType()) { + case kObjectType: + return v->EraseMember(GenericStringRef(last->name, last->length)); + case kArrayType: + if (last->index == kPointerInvalidIndex || last->index >= v->Size()) + return false; + v->Erase(v->Begin() + last->index); + return true; + default: + return false; + } + } + +private: + //! Clone the content from rhs to this. + /*! + \param rhs Source pointer. + \param extraToken Extra tokens to be allocated. + \param extraNameBufferSize Extra name buffer size (in number of Ch) to be allocated. + \return Start of non-occupied name buffer, for storing extra names. + */ + Ch* CopyFromRaw(const GenericPointer& rhs, size_t extraToken = 0, size_t extraNameBufferSize = 0) { + if (!allocator_) // allocator is independently owned. + ownAllocator_ = allocator_ = RAPIDJSON_NEW(Allocator()); + + size_t nameBufferSize = rhs.tokenCount_; // null terminators for tokens + for (Token *t = rhs.tokens_; t != rhs.tokens_ + rhs.tokenCount_; ++t) + nameBufferSize += t->length; + + tokenCount_ = rhs.tokenCount_ + extraToken; + tokens_ = static_cast(allocator_->Malloc(tokenCount_ * sizeof(Token) + (nameBufferSize + extraNameBufferSize) * sizeof(Ch))); + nameBuffer_ = reinterpret_cast(tokens_ + tokenCount_); + if (rhs.tokenCount_ > 0) { + std::memcpy(tokens_, rhs.tokens_, rhs.tokenCount_ * sizeof(Token)); + } + if (nameBufferSize > 0) { + std::memcpy(nameBuffer_, rhs.nameBuffer_, nameBufferSize * sizeof(Ch)); + } + + // Adjust pointers to name buffer + std::ptrdiff_t diff = nameBuffer_ - rhs.nameBuffer_; + for (Token *t = tokens_; t != tokens_ + rhs.tokenCount_; ++t) + t->name += diff; + + return nameBuffer_ + nameBufferSize; + } + + //! Check whether a character should be percent-encoded. + /*! + According to RFC 3986 2.3 Unreserved Characters. + \param c The character (code unit) to be tested. + */ + bool NeedPercentEncode(Ch c) const { + return !((c >= '0' && c <= '9') || (c >= 'A' && c <='Z') || (c >= 'a' && c <= 'z') || c == '-' || c == '.' || c == '_' || c =='~'); + } + + //! Parse a JSON String or its URI fragment representation into tokens. +#ifndef __clang__ // -Wdocumentation + /*! + \param source Either a JSON Pointer string, or its URI fragment representation. Not need to be null terminated. + \param length Length of the source string. + \note Source cannot be JSON String Representation of JSON Pointer, e.g. In "/\u0000", \u0000 will not be unescaped. + */ +#endif + void Parse(const Ch* source, size_t length) { + RAPIDJSON_ASSERT(source != NULL); + RAPIDJSON_ASSERT(nameBuffer_ == 0); + RAPIDJSON_ASSERT(tokens_ == 0); + + // Create own allocator if user did not supply. 
+ if (!allocator_) + ownAllocator_ = allocator_ = RAPIDJSON_NEW(Allocator()); + + // Count number of '/' as tokenCount + tokenCount_ = 0; + for (const Ch* s = source; s != source + length; s++) + if (*s == '/') + tokenCount_++; + + Token* token = tokens_ = static_cast(allocator_->Malloc(tokenCount_ * sizeof(Token) + length * sizeof(Ch))); + Ch* name = nameBuffer_ = reinterpret_cast(tokens_ + tokenCount_); + size_t i = 0; + + // Detect if it is a URI fragment + bool uriFragment = false; + if (source[i] == '#') { + uriFragment = true; + i++; + } + + if (i != length && source[i] != '/') { + parseErrorCode_ = kPointerParseErrorTokenMustBeginWithSolidus; + goto error; + } + + while (i < length) { + RAPIDJSON_ASSERT(source[i] == '/'); + i++; // consumes '/' + + token->name = name; + bool isNumber = true; + + while (i < length && source[i] != '/') { + Ch c = source[i]; + if (uriFragment) { + // Decoding percent-encoding for URI fragment + if (c == '%') { + PercentDecodeStream is(&source[i], source + length); + GenericInsituStringStream os(name); + Ch* begin = os.PutBegin(); + if (!Transcoder, EncodingType>().Validate(is, os) || !is.IsValid()) { + parseErrorCode_ = kPointerParseErrorInvalidPercentEncoding; + goto error; + } + size_t len = os.PutEnd(begin); + i += is.Tell() - 1; + if (len == 1) + c = *name; + else { + name += len; + isNumber = false; + i++; + continue; + } + } + else if (NeedPercentEncode(c)) { + parseErrorCode_ = kPointerParseErrorCharacterMustPercentEncode; + goto error; + } + } + + i++; + + // Escaping "~0" -> '~', "~1" -> '/' + if (c == '~') { + if (i < length) { + c = source[i]; + if (c == '0') c = '~'; + else if (c == '1') c = '/'; + else { + parseErrorCode_ = kPointerParseErrorInvalidEscape; + goto error; + } + i++; + } + else { + parseErrorCode_ = kPointerParseErrorInvalidEscape; + goto error; + } + } + + // First check for index: all of characters are digit + if (c < '0' || c > '9') + isNumber = false; + + *name++ = c; + } + token->length = static_cast(name - token->name); + if (token->length == 0) + isNumber = false; + *name++ = '\0'; // Null terminator + + // Second check for index: more than one digit cannot have leading zero + if (isNumber && token->length > 1 && token->name[0] == '0') + isNumber = false; + + // String to SizeType conversion + SizeType n = 0; + if (isNumber) { + for (size_t j = 0; j < token->length; j++) { + SizeType m = n * 10 + static_cast(token->name[j] - '0'); + if (m < n) { // overflow detection + isNumber = false; + break; + } + n = m; + } + } + + token->index = isNumber ? n : kPointerInvalidIndex; + token++; + } + + RAPIDJSON_ASSERT(name <= nameBuffer_ + length); // Should not overflow buffer + parseErrorCode_ = kPointerParseErrorNone; + return; + + error: + Allocator::Free(tokens_); + nameBuffer_ = 0; + tokens_ = 0; + tokenCount_ = 0; + parseErrorOffset_ = i; + return; + } + + //! Stringify to string or URI fragment representation. + /*! + \tparam uriFragment True for stringifying to URI fragment representation. False for string representation. + \tparam OutputStream type of output stream. + \param os The output stream. 
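+
+        A round-trip sketch of the escaping handled here ("~0" for '~', "~1" for '/').
+        The pointer string is illustrative; it uses the public Stringify() wrapper
+        together with StringBuffer from "rapidjson/stringbuffer.h":
+        \code
+        Pointer p("/m~0n/a~1b/3");   // tokens: "m~n", "a/b", array index 3
+        StringBuffer sb;
+        p.Stringify(sb);             // sb.GetString() == "/m~0n/a~1b/3"
+        \endcode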
+ */ + template + bool Stringify(OutputStream& os) const { + RAPIDJSON_ASSERT(IsValid()); + + if (uriFragment) + os.Put('#'); + + for (Token *t = tokens_; t != tokens_ + tokenCount_; ++t) { + os.Put('/'); + for (size_t j = 0; j < t->length; j++) { + Ch c = t->name[j]; + if (c == '~') { + os.Put('~'); + os.Put('0'); + } + else if (c == '/') { + os.Put('~'); + os.Put('1'); + } + else if (uriFragment && NeedPercentEncode(c)) { + // Transcode to UTF8 sequence + GenericStringStream source(&t->name[j]); + PercentEncodeStream target(os); + if (!Transcoder >().Validate(source, target)) + return false; + j += source.Tell() - 1; + } + else + os.Put(c); + } + } + return true; + } + + //! A helper stream for decoding a percent-encoded sequence into code unit. + /*! + This stream decodes %XY triplet into code unit (0-255). + If it encounters invalid characters, it sets output code unit as 0 and + mark invalid, and to be checked by IsValid(). + */ + class PercentDecodeStream { + public: + typedef typename ValueType::Ch Ch; + + //! Constructor + /*! + \param source Start of the stream + \param end Past-the-end of the stream. + */ + PercentDecodeStream(const Ch* source, const Ch* end) : src_(source), head_(source), end_(end), valid_(true) {} + + Ch Take() { + if (*src_ != '%' || src_ + 3 > end_) { // %XY triplet + valid_ = false; + return 0; + } + src_++; + Ch c = 0; + for (int j = 0; j < 2; j++) { + c = static_cast(c << 4); + Ch h = *src_; + if (h >= '0' && h <= '9') c = static_cast(c + h - '0'); + else if (h >= 'A' && h <= 'F') c = static_cast(c + h - 'A' + 10); + else if (h >= 'a' && h <= 'f') c = static_cast(c + h - 'a' + 10); + else { + valid_ = false; + return 0; + } + src_++; + } + return c; + } + + size_t Tell() const { return static_cast(src_ - head_); } + bool IsValid() const { return valid_; } + + private: + const Ch* src_; //!< Current read position. + const Ch* head_; //!< Original head of the string. + const Ch* end_; //!< Past-the-end position. + bool valid_; //!< Whether the parsing is valid. + }; + + //! A helper stream to encode character (UTF-8 code unit) into percent-encoded sequence. + template + class PercentEncodeStream { + public: + PercentEncodeStream(OutputStream& os) : os_(os) {} + void Put(char c) { // UTF-8 must be byte + unsigned char u = static_cast(c); + static const char hexDigits[16] = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' }; + os_.Put('%'); + os_.Put(hexDigits[u >> 4]); + os_.Put(hexDigits[u & 15]); + } + private: + OutputStream& os_; + }; + + Allocator* allocator_; //!< The current allocator. It is either user-supplied or equal to ownAllocator_. + Allocator* ownAllocator_; //!< Allocator owned by this Pointer. + Ch* nameBuffer_; //!< A buffer containing all names in tokens. + Token* tokens_; //!< A list of tokens. + size_t tokenCount_; //!< Number of tokens in tokens_. + size_t parseErrorOffset_; //!< Offset in code unit when parsing fail. + PointerParseErrorCode parseErrorCode_; //!< Parsing error code. +}; + +//! GenericPointer for Value (UTF-8, default allocator). 
+typedef GenericPointer Pointer; + +//!@name Helper functions for GenericPointer +//@{ + +////////////////////////////////////////////////////////////////////////////// + +template +typename T::ValueType& CreateValueByPointer(T& root, const GenericPointer& pointer, typename T::AllocatorType& a) { + return pointer.Create(root, a); +} + +template +typename T::ValueType& CreateValueByPointer(T& root, const CharType(&source)[N], typename T::AllocatorType& a) { + return GenericPointer(source, N - 1).Create(root, a); +} + +// No allocator parameter + +template +typename DocumentType::ValueType& CreateValueByPointer(DocumentType& document, const GenericPointer& pointer) { + return pointer.Create(document); +} + +template +typename DocumentType::ValueType& CreateValueByPointer(DocumentType& document, const CharType(&source)[N]) { + return GenericPointer(source, N - 1).Create(document); +} + +////////////////////////////////////////////////////////////////////////////// + +template +typename T::ValueType* GetValueByPointer(T& root, const GenericPointer& pointer, size_t* unresolvedTokenIndex = 0) { + return pointer.Get(root, unresolvedTokenIndex); +} + +template +const typename T::ValueType* GetValueByPointer(const T& root, const GenericPointer& pointer, size_t* unresolvedTokenIndex = 0) { + return pointer.Get(root, unresolvedTokenIndex); +} + +template +typename T::ValueType* GetValueByPointer(T& root, const CharType (&source)[N], size_t* unresolvedTokenIndex = 0) { + return GenericPointer(source, N - 1).Get(root, unresolvedTokenIndex); +} + +template +const typename T::ValueType* GetValueByPointer(const T& root, const CharType(&source)[N], size_t* unresolvedTokenIndex = 0) { + return GenericPointer(source, N - 1).Get(root, unresolvedTokenIndex); +} + +////////////////////////////////////////////////////////////////////////////// + +template +typename T::ValueType& GetValueByPointerWithDefault(T& root, const GenericPointer& pointer, const typename T::ValueType& defaultValue, typename T::AllocatorType& a) { + return pointer.GetWithDefault(root, defaultValue, a); +} + +template +typename T::ValueType& GetValueByPointerWithDefault(T& root, const GenericPointer& pointer, const typename T::Ch* defaultValue, typename T::AllocatorType& a) { + return pointer.GetWithDefault(root, defaultValue, a); +} + +#if RAPIDJSON_HAS_STDSTRING +template +typename T::ValueType& GetValueByPointerWithDefault(T& root, const GenericPointer& pointer, const std::basic_string& defaultValue, typename T::AllocatorType& a) { + return pointer.GetWithDefault(root, defaultValue, a); +} +#endif + +template +RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (typename T::ValueType&)) +GetValueByPointerWithDefault(T& root, const GenericPointer& pointer, T2 defaultValue, typename T::AllocatorType& a) { + return pointer.GetWithDefault(root, defaultValue, a); +} + +template +typename T::ValueType& GetValueByPointerWithDefault(T& root, const CharType(&source)[N], const typename T::ValueType& defaultValue, typename T::AllocatorType& a) { + return GenericPointer(source, N - 1).GetWithDefault(root, defaultValue, a); +} + +template +typename T::ValueType& GetValueByPointerWithDefault(T& root, const CharType(&source)[N], const typename T::Ch* defaultValue, typename T::AllocatorType& a) { + return GenericPointer(source, N - 1).GetWithDefault(root, defaultValue, a); +} + +#if RAPIDJSON_HAS_STDSTRING +template +typename T::ValueType& GetValueByPointerWithDefault(T& root, const CharType(&source)[N], const std::basic_string& 
defaultValue, typename T::AllocatorType& a) { + return GenericPointer(source, N - 1).GetWithDefault(root, defaultValue, a); +} +#endif + +template +RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (typename T::ValueType&)) +GetValueByPointerWithDefault(T& root, const CharType(&source)[N], T2 defaultValue, typename T::AllocatorType& a) { + return GenericPointer(source, N - 1).GetWithDefault(root, defaultValue, a); +} + +// No allocator parameter + +template +typename DocumentType::ValueType& GetValueByPointerWithDefault(DocumentType& document, const GenericPointer& pointer, const typename DocumentType::ValueType& defaultValue) { + return pointer.GetWithDefault(document, defaultValue); +} + +template +typename DocumentType::ValueType& GetValueByPointerWithDefault(DocumentType& document, const GenericPointer& pointer, const typename DocumentType::Ch* defaultValue) { + return pointer.GetWithDefault(document, defaultValue); +} + +#if RAPIDJSON_HAS_STDSTRING +template +typename DocumentType::ValueType& GetValueByPointerWithDefault(DocumentType& document, const GenericPointer& pointer, const std::basic_string& defaultValue) { + return pointer.GetWithDefault(document, defaultValue); +} +#endif + +template +RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (typename DocumentType::ValueType&)) +GetValueByPointerWithDefault(DocumentType& document, const GenericPointer& pointer, T2 defaultValue) { + return pointer.GetWithDefault(document, defaultValue); +} + +template +typename DocumentType::ValueType& GetValueByPointerWithDefault(DocumentType& document, const CharType(&source)[N], const typename DocumentType::ValueType& defaultValue) { + return GenericPointer(source, N - 1).GetWithDefault(document, defaultValue); +} + +template +typename DocumentType::ValueType& GetValueByPointerWithDefault(DocumentType& document, const CharType(&source)[N], const typename DocumentType::Ch* defaultValue) { + return GenericPointer(source, N - 1).GetWithDefault(document, defaultValue); +} + +#if RAPIDJSON_HAS_STDSTRING +template +typename DocumentType::ValueType& GetValueByPointerWithDefault(DocumentType& document, const CharType(&source)[N], const std::basic_string& defaultValue) { + return GenericPointer(source, N - 1).GetWithDefault(document, defaultValue); +} +#endif + +template +RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (typename DocumentType::ValueType&)) +GetValueByPointerWithDefault(DocumentType& document, const CharType(&source)[N], T2 defaultValue) { + return GenericPointer(source, N - 1).GetWithDefault(document, defaultValue); +} + +////////////////////////////////////////////////////////////////////////////// + +template +typename T::ValueType& SetValueByPointer(T& root, const GenericPointer& pointer, typename T::ValueType& value, typename T::AllocatorType& a) { + return pointer.Set(root, value, a); +} + +template +typename T::ValueType& SetValueByPointer(T& root, const GenericPointer& pointer, const typename T::ValueType& value, typename T::AllocatorType& a) { + return pointer.Set(root, value, a); +} + +template +typename T::ValueType& SetValueByPointer(T& root, const GenericPointer& pointer, const typename T::Ch* value, typename T::AllocatorType& a) { + return pointer.Set(root, value, a); +} + +#if RAPIDJSON_HAS_STDSTRING +template +typename T::ValueType& SetValueByPointer(T& root, const GenericPointer& pointer, const std::basic_string& value, typename T::AllocatorType& a) { + return pointer.Set(root, value, a); +} +#endif 
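+
+// A hedged usage sketch for the free helper functions in this section; the JSON
+// content and pointer strings below are illustrative only:
+//
+//     Document d;
+//     d.Parse("{}");
+//     SetValueByPointer(d, "/owner/name", "Ada");                    // creates the intermediate "owner" object
+//     Value& port = GetValueByPointerWithDefault(d, "/port", 8080);  // inserts 8080 if missing
+//     Value* name = GetValueByPointer(d, "/owner/name");             // non-creating lookup, may be null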
+ +template +RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (typename T::ValueType&)) +SetValueByPointer(T& root, const GenericPointer& pointer, T2 value, typename T::AllocatorType& a) { + return pointer.Set(root, value, a); +} + +template +typename T::ValueType& SetValueByPointer(T& root, const CharType(&source)[N], typename T::ValueType& value, typename T::AllocatorType& a) { + return GenericPointer(source, N - 1).Set(root, value, a); +} + +template +typename T::ValueType& SetValueByPointer(T& root, const CharType(&source)[N], const typename T::ValueType& value, typename T::AllocatorType& a) { + return GenericPointer(source, N - 1).Set(root, value, a); +} + +template +typename T::ValueType& SetValueByPointer(T& root, const CharType(&source)[N], const typename T::Ch* value, typename T::AllocatorType& a) { + return GenericPointer(source, N - 1).Set(root, value, a); +} + +#if RAPIDJSON_HAS_STDSTRING +template +typename T::ValueType& SetValueByPointer(T& root, const CharType(&source)[N], const std::basic_string& value, typename T::AllocatorType& a) { + return GenericPointer(source, N - 1).Set(root, value, a); +} +#endif + +template +RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (typename T::ValueType&)) +SetValueByPointer(T& root, const CharType(&source)[N], T2 value, typename T::AllocatorType& a) { + return GenericPointer(source, N - 1).Set(root, value, a); +} + +// No allocator parameter + +template +typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const GenericPointer& pointer, typename DocumentType::ValueType& value) { + return pointer.Set(document, value); +} + +template +typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const GenericPointer& pointer, const typename DocumentType::ValueType& value) { + return pointer.Set(document, value); +} + +template +typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const GenericPointer& pointer, const typename DocumentType::Ch* value) { + return pointer.Set(document, value); +} + +#if RAPIDJSON_HAS_STDSTRING +template +typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const GenericPointer& pointer, const std::basic_string& value) { + return pointer.Set(document, value); +} +#endif + +template +RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (typename DocumentType::ValueType&)) +SetValueByPointer(DocumentType& document, const GenericPointer& pointer, T2 value) { + return pointer.Set(document, value); +} + +template +typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const CharType(&source)[N], typename DocumentType::ValueType& value) { + return GenericPointer(source, N - 1).Set(document, value); +} + +template +typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const CharType(&source)[N], const typename DocumentType::ValueType& value) { + return GenericPointer(source, N - 1).Set(document, value); +} + +template +typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const CharType(&source)[N], const typename DocumentType::Ch* value) { + return GenericPointer(source, N - 1).Set(document, value); +} + +#if RAPIDJSON_HAS_STDSTRING +template +typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const CharType(&source)[N], const std::basic_string& value) { + return GenericPointer(source, N - 1).Set(document, value); +} +#endif + +template 
+RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr, internal::IsGenericValue >), (typename DocumentType::ValueType&)) +SetValueByPointer(DocumentType& document, const CharType(&source)[N], T2 value) { + return GenericPointer(source, N - 1).Set(document, value); +} + +////////////////////////////////////////////////////////////////////////////// + +template +typename T::ValueType& SwapValueByPointer(T& root, const GenericPointer& pointer, typename T::ValueType& value, typename T::AllocatorType& a) { + return pointer.Swap(root, value, a); +} + +template +typename T::ValueType& SwapValueByPointer(T& root, const CharType(&source)[N], typename T::ValueType& value, typename T::AllocatorType& a) { + return GenericPointer(source, N - 1).Swap(root, value, a); +} + +template +typename DocumentType::ValueType& SwapValueByPointer(DocumentType& document, const GenericPointer& pointer, typename DocumentType::ValueType& value) { + return pointer.Swap(document, value); +} + +template +typename DocumentType::ValueType& SwapValueByPointer(DocumentType& document, const CharType(&source)[N], typename DocumentType::ValueType& value) { + return GenericPointer(source, N - 1).Swap(document, value); +} + +////////////////////////////////////////////////////////////////////////////// + +template +bool EraseValueByPointer(T& root, const GenericPointer& pointer) { + return pointer.Erase(root); +} + +template +bool EraseValueByPointer(T& root, const CharType(&source)[N]) { + return GenericPointer(source, N - 1).Erase(root); +} + +//@} + +RAPIDJSON_NAMESPACE_END + +#ifdef __clang__ +RAPIDJSON_DIAG_POP +#endif + +#ifdef _MSC_VER +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_POINTER_H_ diff --git a/include/rapidjson/prettywriter.h b/include/rapidjson/prettywriter.h new file mode 100644 index 0000000..0dcb0fe --- /dev/null +++ b/include/rapidjson/prettywriter.h @@ -0,0 +1,255 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_PRETTYWRITER_H_ +#define RAPIDJSON_PRETTYWRITER_H_ + +#include "writer.h" + +#ifdef __GNUC__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(effc++) +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +//! Combination of PrettyWriter format flags. +/*! \see PrettyWriter::SetFormatOptions + */ +enum PrettyFormatOptions { + kFormatDefault = 0, //!< Default pretty formatting. + kFormatSingleLineArray = 1 //!< Format arrays on a single line. +}; + +//! Writer with indentation and spacing. +/*! + \tparam OutputStream Type of ouptut os. + \tparam SourceEncoding Encoding of source string. + \tparam TargetEncoding Encoding of output stream. + \tparam StackAllocator Type of allocator for allocating memory of stack. 
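+
+    A usage sketch, assuming "rapidjson/stringbuffer.h" is also included (the key,
+    value and indent width are illustrative):
+    \code
+    StringBuffer sb;
+    PrettyWriter<StringBuffer> writer(sb);
+    writer.SetIndent(' ', 2);
+    writer.StartObject();
+    writer.Key("hello");
+    writer.String("world");
+    writer.EndObject();
+    // sb.GetString() == "{\n  \"hello\": \"world\"\n}"
+    \endcode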
+*/ +template, typename TargetEncoding = UTF8<>, typename StackAllocator = CrtAllocator, unsigned writeFlags = kWriteDefaultFlags> +class PrettyWriter : public Writer { +public: + typedef Writer Base; + typedef typename Base::Ch Ch; + + //! Constructor + /*! \param os Output stream. + \param allocator User supplied allocator. If it is null, it will create a private one. + \param levelDepth Initial capacity of stack. + */ + explicit PrettyWriter(OutputStream& os, StackAllocator* allocator = 0, size_t levelDepth = Base::kDefaultLevelDepth) : + Base(os, allocator, levelDepth), indentChar_(' '), indentCharCount_(4), formatOptions_(kFormatDefault) {} + + + explicit PrettyWriter(StackAllocator* allocator = 0, size_t levelDepth = Base::kDefaultLevelDepth) : + Base(allocator, levelDepth), indentChar_(' '), indentCharCount_(4) {} + + //! Set custom indentation. + /*! \param indentChar Character for indentation. Must be whitespace character (' ', '\\t', '\\n', '\\r'). + \param indentCharCount Number of indent characters for each indentation level. + \note The default indentation is 4 spaces. + */ + PrettyWriter& SetIndent(Ch indentChar, unsigned indentCharCount) { + RAPIDJSON_ASSERT(indentChar == ' ' || indentChar == '\t' || indentChar == '\n' || indentChar == '\r'); + indentChar_ = indentChar; + indentCharCount_ = indentCharCount; + return *this; + } + + //! Set pretty writer formatting options. + /*! \param options Formatting options. + */ + PrettyWriter& SetFormatOptions(PrettyFormatOptions options) { + formatOptions_ = options; + return *this; + } + + /*! @name Implementation of Handler + \see Handler + */ + //@{ + + bool Null() { PrettyPrefix(kNullType); return Base::WriteNull(); } + bool Bool(bool b) { PrettyPrefix(b ? kTrueType : kFalseType); return Base::WriteBool(b); } + bool Int(int i) { PrettyPrefix(kNumberType); return Base::WriteInt(i); } + bool Uint(unsigned u) { PrettyPrefix(kNumberType); return Base::WriteUint(u); } + bool Int64(int64_t i64) { PrettyPrefix(kNumberType); return Base::WriteInt64(i64); } + bool Uint64(uint64_t u64) { PrettyPrefix(kNumberType); return Base::WriteUint64(u64); } + bool Double(double d) { PrettyPrefix(kNumberType); return Base::WriteDouble(d); } + + bool RawNumber(const Ch* str, SizeType length, bool copy = false) { + (void)copy; + PrettyPrefix(kNumberType); + return Base::WriteString(str, length); + } + + bool String(const Ch* str, SizeType length, bool copy = false) { + (void)copy; + PrettyPrefix(kStringType); + return Base::WriteString(str, length); + } + +#if RAPIDJSON_HAS_STDSTRING + bool String(const std::basic_string& str) { + return String(str.data(), SizeType(str.size())); + } +#endif + + bool StartObject() { + PrettyPrefix(kObjectType); + new (Base::level_stack_.template Push()) typename Base::Level(false); + return Base::WriteStartObject(); + } + + bool Key(const Ch* str, SizeType length, bool copy = false) { return String(str, length, copy); } + +#if RAPIDJSON_HAS_STDSTRING + bool Key(const std::basic_string& str) { + return Key(str.data(), SizeType(str.size())); + } +#endif + + bool EndObject(SizeType memberCount = 0) { + (void)memberCount; + RAPIDJSON_ASSERT(Base::level_stack_.GetSize() >= sizeof(typename Base::Level)); + RAPIDJSON_ASSERT(!Base::level_stack_.template Top()->inArray); + bool empty = Base::level_stack_.template Pop(1)->valueCount == 0; + + if (!empty) { + Base::os_->Put('\n'); + WriteIndent(); + } + bool ret = Base::WriteEndObject(); + (void)ret; + RAPIDJSON_ASSERT(ret == true); + if (Base::level_stack_.Empty()) // end of json 
text + Base::os_->Flush(); + return true; + } + + bool StartArray() { + PrettyPrefix(kArrayType); + new (Base::level_stack_.template Push()) typename Base::Level(true); + return Base::WriteStartArray(); + } + + bool EndArray(SizeType memberCount = 0) { + (void)memberCount; + RAPIDJSON_ASSERT(Base::level_stack_.GetSize() >= sizeof(typename Base::Level)); + RAPIDJSON_ASSERT(Base::level_stack_.template Top()->inArray); + bool empty = Base::level_stack_.template Pop(1)->valueCount == 0; + + if (!empty && !(formatOptions_ & kFormatSingleLineArray)) { + Base::os_->Put('\n'); + WriteIndent(); + } + bool ret = Base::WriteEndArray(); + (void)ret; + RAPIDJSON_ASSERT(ret == true); + if (Base::level_stack_.Empty()) // end of json text + Base::os_->Flush(); + return true; + } + + //@} + + /*! @name Convenience extensions */ + //@{ + + //! Simpler but slower overload. + bool String(const Ch* str) { return String(str, internal::StrLen(str)); } + bool Key(const Ch* str) { return Key(str, internal::StrLen(str)); } + + //@} + + //! Write a raw JSON value. + /*! + For user to write a stringified JSON as a value. + + \param json A well-formed JSON value. It should not contain null character within [0, length - 1] range. + \param length Length of the json. + \param type Type of the root of json. + \note When using PrettyWriter::RawValue(), the result json may not be indented correctly. + */ + bool RawValue(const Ch* json, size_t length, Type type) { PrettyPrefix(type); return Base::WriteRawValue(json, length); } + +protected: + void PrettyPrefix(Type type) { + (void)type; + if (Base::level_stack_.GetSize() != 0) { // this value is not at root + typename Base::Level* level = Base::level_stack_.template Top(); + + if (level->inArray) { + if (level->valueCount > 0) { + Base::os_->Put(','); // add comma if it is not the first element in array + if (formatOptions_ & kFormatSingleLineArray) + Base::os_->Put(' '); + } + + if (!(formatOptions_ & kFormatSingleLineArray)) { + Base::os_->Put('\n'); + WriteIndent(); + } + } + else { // in object + if (level->valueCount > 0) { + if (level->valueCount % 2 == 0) { + Base::os_->Put(','); + Base::os_->Put('\n'); + } + else { + Base::os_->Put(':'); + Base::os_->Put(' '); + } + } + else + Base::os_->Put('\n'); + + if (level->valueCount % 2 == 0) + WriteIndent(); + } + if (!level->inArray && level->valueCount % 2 == 0) + RAPIDJSON_ASSERT(type == kStringType); // if it's in object, then even number should be a name + level->valueCount++; + } + else { + RAPIDJSON_ASSERT(!Base::hasRoot_); // Should only has one and only one root. + Base::hasRoot_ = true; + } + } + + void WriteIndent() { + size_t count = (Base::level_stack_.GetSize() / sizeof(typename Base::Level)) * indentCharCount_; + PutN(*Base::os_, static_cast(indentChar_), count); + } + + Ch indentChar_; + unsigned indentCharCount_; + PrettyFormatOptions formatOptions_; + +private: + // Prohibit copy constructor & assignment operator. + PrettyWriter(const PrettyWriter&); + PrettyWriter& operator=(const PrettyWriter&); +}; + +RAPIDJSON_NAMESPACE_END + +#ifdef __GNUC__ +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_RAPIDJSON_H_ diff --git a/include/rapidjson/rapidjson.h b/include/rapidjson/rapidjson.h new file mode 100644 index 0000000..053b2ce --- /dev/null +++ b/include/rapidjson/rapidjson.h @@ -0,0 +1,615 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. 
+// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_RAPIDJSON_H_ +#define RAPIDJSON_RAPIDJSON_H_ + +/*!\file rapidjson.h + \brief common definitions and configuration + + \see RAPIDJSON_CONFIG + */ + +/*! \defgroup RAPIDJSON_CONFIG RapidJSON configuration + \brief Configuration macros for library features + + Some RapidJSON features are configurable to adapt the library to a wide + variety of platforms, environments and usage scenarios. Most of the + features can be configured in terms of overriden or predefined + preprocessor macros at compile-time. + + Some additional customization is available in the \ref RAPIDJSON_ERRORS APIs. + + \note These macros should be given on the compiler command-line + (where applicable) to avoid inconsistent values when compiling + different translation units of a single application. + */ + +#include // malloc(), realloc(), free(), size_t +#include // memset(), memcpy(), memmove(), memcmp() + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_VERSION_STRING +// +// ALWAYS synchronize the following 3 macros with corresponding variables in /CMakeLists.txt. +// + +//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN +// token stringification +#define RAPIDJSON_STRINGIFY(x) RAPIDJSON_DO_STRINGIFY(x) +#define RAPIDJSON_DO_STRINGIFY(x) #x +//!@endcond + +/*! \def RAPIDJSON_MAJOR_VERSION + \ingroup RAPIDJSON_CONFIG + \brief Major version of RapidJSON in integer. +*/ +/*! \def RAPIDJSON_MINOR_VERSION + \ingroup RAPIDJSON_CONFIG + \brief Minor version of RapidJSON in integer. +*/ +/*! \def RAPIDJSON_PATCH_VERSION + \ingroup RAPIDJSON_CONFIG + \brief Patch version of RapidJSON in integer. +*/ +/*! \def RAPIDJSON_VERSION_STRING + \ingroup RAPIDJSON_CONFIG + \brief Version of RapidJSON in ".." string format. +*/ +#define RAPIDJSON_MAJOR_VERSION 1 +#define RAPIDJSON_MINOR_VERSION 1 +#define RAPIDJSON_PATCH_VERSION 0 +#define RAPIDJSON_VERSION_STRING \ + RAPIDJSON_STRINGIFY(RAPIDJSON_MAJOR_VERSION.RAPIDJSON_MINOR_VERSION.RAPIDJSON_PATCH_VERSION) + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_NAMESPACE_(BEGIN|END) +/*! \def RAPIDJSON_NAMESPACE + \ingroup RAPIDJSON_CONFIG + \brief provide custom rapidjson namespace + + In order to avoid symbol clashes and/or "One Definition Rule" errors + between multiple inclusions of (different versions of) RapidJSON in + a single binary, users can customize the name of the main RapidJSON + namespace. + + In case of a single nesting level, defining \c RAPIDJSON_NAMESPACE + to a custom name (e.g. \c MyRapidJSON) is sufficient. If multiple + levels are needed, both \ref RAPIDJSON_NAMESPACE_BEGIN and \ref + RAPIDJSON_NAMESPACE_END need to be defined as well: + + \code + // in some .cpp file + #define RAPIDJSON_NAMESPACE my::rapidjson + #define RAPIDJSON_NAMESPACE_BEGIN namespace my { namespace rapidjson { + #define RAPIDJSON_NAMESPACE_END } } + #include "rapidjson/..." + \endcode + + \see rapidjson + */ +/*! 
\def RAPIDJSON_NAMESPACE_BEGIN + \ingroup RAPIDJSON_CONFIG + \brief provide custom rapidjson namespace (opening expression) + \see RAPIDJSON_NAMESPACE +*/ +/*! \def RAPIDJSON_NAMESPACE_END + \ingroup RAPIDJSON_CONFIG + \brief provide custom rapidjson namespace (closing expression) + \see RAPIDJSON_NAMESPACE +*/ +#ifndef RAPIDJSON_NAMESPACE +#define RAPIDJSON_NAMESPACE rapidjson +#endif +#ifndef RAPIDJSON_NAMESPACE_BEGIN +#define RAPIDJSON_NAMESPACE_BEGIN namespace RAPIDJSON_NAMESPACE { +#endif +#ifndef RAPIDJSON_NAMESPACE_END +#define RAPIDJSON_NAMESPACE_END } +#endif + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_HAS_STDSTRING + +#ifndef RAPIDJSON_HAS_STDSTRING +#ifdef RAPIDJSON_DOXYGEN_RUNNING +#define RAPIDJSON_HAS_STDSTRING 1 // force generation of documentation +#else +#define RAPIDJSON_HAS_STDSTRING 0 // no std::string support by default +#endif +/*! \def RAPIDJSON_HAS_STDSTRING + \ingroup RAPIDJSON_CONFIG + \brief Enable RapidJSON support for \c std::string + + By defining this preprocessor symbol to \c 1, several convenience functions for using + \ref rapidjson::GenericValue with \c std::string are enabled, especially + for construction and comparison. + + \hideinitializer +*/ +#endif // !defined(RAPIDJSON_HAS_STDSTRING) + +#if RAPIDJSON_HAS_STDSTRING +#include +#endif // RAPIDJSON_HAS_STDSTRING + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_NO_INT64DEFINE + +/*! \def RAPIDJSON_NO_INT64DEFINE + \ingroup RAPIDJSON_CONFIG + \brief Use external 64-bit integer types. + + RapidJSON requires the 64-bit integer types \c int64_t and \c uint64_t types + to be available at global scope. + + If users have their own definition, define RAPIDJSON_NO_INT64DEFINE to + prevent RapidJSON from defining its own types. +*/ +#ifndef RAPIDJSON_NO_INT64DEFINE +//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN +#if defined(_MSC_VER) && (_MSC_VER < 1800) // Visual Studio 2013 +#include "msinttypes/stdint.h" +#include "msinttypes/inttypes.h" +#else +// Other compilers should have this. +#include +#include +#endif +//!@endcond +#ifdef RAPIDJSON_DOXYGEN_RUNNING +#define RAPIDJSON_NO_INT64DEFINE +#endif +#endif // RAPIDJSON_NO_INT64TYPEDEF + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_FORCEINLINE + +#ifndef RAPIDJSON_FORCEINLINE +//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN +#if defined(_MSC_VER) && defined(NDEBUG) +#define RAPIDJSON_FORCEINLINE __forceinline +#elif defined(__GNUC__) && __GNUC__ >= 4 && defined(NDEBUG) +#define RAPIDJSON_FORCEINLINE __attribute__((always_inline)) +#else +#define RAPIDJSON_FORCEINLINE +#endif +//!@endcond +#endif // RAPIDJSON_FORCEINLINE + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_ENDIAN +#define RAPIDJSON_LITTLEENDIAN 0 //!< Little endian machine +#define RAPIDJSON_BIGENDIAN 1 //!< Big endian machine + +//! Endianness of the machine. +/*! + \def RAPIDJSON_ENDIAN + \ingroup RAPIDJSON_CONFIG + + GCC 4.6 provided macro for detecting endianness of the target machine. But other + compilers may not have this. User can define RAPIDJSON_ENDIAN to either + \ref RAPIDJSON_LITTLEENDIAN or \ref RAPIDJSON_BIGENDIAN. 
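+
+    For example, when automatic detection fails the endianness can be fixed before
+    inclusion (an illustrative override):
+    \code
+    #define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN
+    #include "rapidjson/rapidjson.h"
+    \endcode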
+ + Default detection implemented with reference to + \li https://gcc.gnu.org/onlinedocs/gcc-4.6.0/cpp/Common-Predefined-Macros.html + \li http://www.boost.org/doc/libs/1_42_0/boost/detail/endian.hpp +*/ +#ifndef RAPIDJSON_ENDIAN +// Detect with GCC 4.6's macro +# ifdef __BYTE_ORDER__ +# if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ +# define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN +# elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ +# define RAPIDJSON_ENDIAN RAPIDJSON_BIGENDIAN +# else +# error Unknown machine endianess detected. User needs to define RAPIDJSON_ENDIAN. +# endif // __BYTE_ORDER__ +// Detect with GLIBC's endian.h +# elif defined(__GLIBC__) +# include +# if (__BYTE_ORDER == __LITTLE_ENDIAN) +# define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN +# elif (__BYTE_ORDER == __BIG_ENDIAN) +# define RAPIDJSON_ENDIAN RAPIDJSON_BIGENDIAN +# else +# error Unknown machine endianess detected. User needs to define RAPIDJSON_ENDIAN. +# endif // __GLIBC__ +// Detect with _LITTLE_ENDIAN and _BIG_ENDIAN macro +# elif defined(_LITTLE_ENDIAN) && !defined(_BIG_ENDIAN) +# define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN +# elif defined(_BIG_ENDIAN) && !defined(_LITTLE_ENDIAN) +# define RAPIDJSON_ENDIAN RAPIDJSON_BIGENDIAN +// Detect with architecture macros +# elif defined(__sparc) || defined(__sparc__) || defined(_POWER) || defined(__powerpc__) || defined(__ppc__) || defined(__hpux) || defined(__hppa) || defined(_MIPSEB) || defined(_POWER) || defined(__s390__) +# define RAPIDJSON_ENDIAN RAPIDJSON_BIGENDIAN +# elif defined(__i386__) || defined(__alpha__) || defined(__ia64) || defined(__ia64__) || defined(_M_IX86) || defined(_M_IA64) || defined(_M_ALPHA) || defined(__amd64) || defined(__amd64__) || defined(_M_AMD64) || defined(__x86_64) || defined(__x86_64__) || defined(_M_X64) || defined(__bfin__) +# define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN +# elif defined(_MSC_VER) && defined(_M_ARM) +# define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN +# elif defined(RAPIDJSON_DOXYGEN_RUNNING) +# define RAPIDJSON_ENDIAN +# else +# error Unknown machine endianess detected. User needs to define RAPIDJSON_ENDIAN. +# endif +#endif // RAPIDJSON_ENDIAN + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_64BIT + +//! Whether using 64-bit architecture +#ifndef RAPIDJSON_64BIT +#if defined(__LP64__) || (defined(__x86_64__) && defined(__ILP32__)) || defined(_WIN64) || defined(__EMSCRIPTEN__) +#define RAPIDJSON_64BIT 1 +#else +#define RAPIDJSON_64BIT 0 +#endif +#endif // RAPIDJSON_64BIT + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_ALIGN + +//! Data alignment of the machine. +/*! \ingroup RAPIDJSON_CONFIG + \param x pointer to align + + Some machines require strict data alignment. Currently the default uses 4 bytes + alignment on 32-bit platforms and 8 bytes alignment for 64-bit platforms. + User can customize by defining the RAPIDJSON_ALIGN function macro. +*/ +#ifndef RAPIDJSON_ALIGN +#if RAPIDJSON_64BIT == 1 +#define RAPIDJSON_ALIGN(x) (((x) + static_cast(7u)) & ~static_cast(7u)) +#else +#define RAPIDJSON_ALIGN(x) (((x) + 3u) & ~3u) +#endif +#endif + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_UINT64_C2 + +//! Construct a 64-bit literal by a pair of 32-bit integer. +/*! + 64-bit literal with or without ULL suffix is prone to compiler warnings. + UINT64_C() is C macro which cause compilation problems. + Use this macro to define 64-bit constants by a pair of 32-bit integer. 
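+
+    For example (illustrative):
+    \code
+    const uint64_t k2To32 = RAPIDJSON_UINT64_C2(0x00000001, 0x00000000);  // 4294967296
+    \endcode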
+*/ +#ifndef RAPIDJSON_UINT64_C2 +#define RAPIDJSON_UINT64_C2(high32, low32) ((static_cast(high32) << 32) | static_cast(low32)) +#endif + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_48BITPOINTER_OPTIMIZATION + +//! Use only lower 48-bit address for some pointers. +/*! + \ingroup RAPIDJSON_CONFIG + + This optimization uses the fact that current X86-64 architecture only implement lower 48-bit virtual address. + The higher 16-bit can be used for storing other data. + \c GenericValue uses this optimization to reduce its size form 24 bytes to 16 bytes in 64-bit architecture. +*/ +#ifndef RAPIDJSON_48BITPOINTER_OPTIMIZATION +#if defined(__amd64__) || defined(__amd64) || defined(__x86_64__) || defined(__x86_64) || defined(_M_X64) || defined(_M_AMD64) +#define RAPIDJSON_48BITPOINTER_OPTIMIZATION 1 +#else +#define RAPIDJSON_48BITPOINTER_OPTIMIZATION 0 +#endif +#endif // RAPIDJSON_48BITPOINTER_OPTIMIZATION + +#if RAPIDJSON_48BITPOINTER_OPTIMIZATION == 1 +#if RAPIDJSON_64BIT != 1 +#error RAPIDJSON_48BITPOINTER_OPTIMIZATION can only be set to 1 when RAPIDJSON_64BIT=1 +#endif +#define RAPIDJSON_SETPOINTER(type, p, x) (p = reinterpret_cast((reinterpret_cast(p) & static_cast(RAPIDJSON_UINT64_C2(0xFFFF0000, 0x00000000))) | reinterpret_cast(reinterpret_cast(x)))) +#define RAPIDJSON_GETPOINTER(type, p) (reinterpret_cast(reinterpret_cast(p) & static_cast(RAPIDJSON_UINT64_C2(0x0000FFFF, 0xFFFFFFFF)))) +#else +#define RAPIDJSON_SETPOINTER(type, p, x) (p = (x)) +#define RAPIDJSON_GETPOINTER(type, p) (p) +#endif + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_SSE2/RAPIDJSON_SSE42/RAPIDJSON_SIMD + +/*! \def RAPIDJSON_SIMD + \ingroup RAPIDJSON_CONFIG + \brief Enable SSE2/SSE4.2 optimization. + + RapidJSON supports optimized implementations for some parsing operations + based on the SSE2 or SSE4.2 SIMD extensions on modern Intel-compatible + processors. + + To enable these optimizations, two different symbols can be defined; + \code + // Enable SSE2 optimization. + #define RAPIDJSON_SSE2 + + // Enable SSE4.2 optimization. + #define RAPIDJSON_SSE42 + \endcode + + \c RAPIDJSON_SSE42 takes precedence, if both are defined. + + If any of these symbols is defined, RapidJSON defines the macro + \c RAPIDJSON_SIMD to indicate the availability of the optimized code. +*/ +#if defined(RAPIDJSON_SSE2) || defined(RAPIDJSON_SSE42) \ + || defined(RAPIDJSON_DOXYGEN_RUNNING) +#define RAPIDJSON_SIMD +#endif + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_NO_SIZETYPEDEFINE + +#ifndef RAPIDJSON_NO_SIZETYPEDEFINE +/*! \def RAPIDJSON_NO_SIZETYPEDEFINE + \ingroup RAPIDJSON_CONFIG + \brief User-provided \c SizeType definition. + + In order to avoid using 32-bit size types for indexing strings and arrays, + define this preprocessor symbol and provide the type rapidjson::SizeType + before including RapidJSON: + \code + #define RAPIDJSON_NO_SIZETYPEDEFINE + namespace rapidjson { typedef ::std::size_t SizeType; } + #include "rapidjson/..." + \endcode + + \see rapidjson::SizeType +*/ +#ifdef RAPIDJSON_DOXYGEN_RUNNING +#define RAPIDJSON_NO_SIZETYPEDEFINE +#endif +RAPIDJSON_NAMESPACE_BEGIN +//! Size type (for string lengths, array sizes, etc.) +/*! RapidJSON uses 32-bit array/string indices even on 64-bit platforms, + instead of using \c size_t. Users may override the SizeType by defining + \ref RAPIDJSON_NO_SIZETYPEDEFINE. 
+*/ +typedef unsigned SizeType; +RAPIDJSON_NAMESPACE_END +#endif + +// always import std::size_t to rapidjson namespace +RAPIDJSON_NAMESPACE_BEGIN +using std::size_t; +RAPIDJSON_NAMESPACE_END + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_ASSERT + +//! Assertion. +/*! \ingroup RAPIDJSON_CONFIG + By default, rapidjson uses C \c assert() for internal assertions. + User can override it by defining RAPIDJSON_ASSERT(x) macro. + + \note Parsing errors are handled and can be customized by the + \ref RAPIDJSON_ERRORS APIs. +*/ +#ifndef RAPIDJSON_ASSERT +#include +#define RAPIDJSON_ASSERT(x) assert(x) +#endif // RAPIDJSON_ASSERT + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_STATIC_ASSERT + +// Adopt from boost +#ifndef RAPIDJSON_STATIC_ASSERT +#ifndef __clang__ +//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN +#endif +RAPIDJSON_NAMESPACE_BEGIN +template struct STATIC_ASSERTION_FAILURE; +template <> struct STATIC_ASSERTION_FAILURE { enum { value = 1 }; }; +template struct StaticAssertTest {}; +RAPIDJSON_NAMESPACE_END + +#define RAPIDJSON_JOIN(X, Y) RAPIDJSON_DO_JOIN(X, Y) +#define RAPIDJSON_DO_JOIN(X, Y) RAPIDJSON_DO_JOIN2(X, Y) +#define RAPIDJSON_DO_JOIN2(X, Y) X##Y + +#if defined(__GNUC__) +#define RAPIDJSON_STATIC_ASSERT_UNUSED_ATTRIBUTE __attribute__((unused)) +#else +#define RAPIDJSON_STATIC_ASSERT_UNUSED_ATTRIBUTE +#endif +#ifndef __clang__ +//!@endcond +#endif + +/*! \def RAPIDJSON_STATIC_ASSERT + \brief (Internal) macro to check for conditions at compile-time + \param x compile-time condition + \hideinitializer + */ +#define RAPIDJSON_STATIC_ASSERT(x) \ + typedef ::RAPIDJSON_NAMESPACE::StaticAssertTest< \ + sizeof(::RAPIDJSON_NAMESPACE::STATIC_ASSERTION_FAILURE)> \ + RAPIDJSON_JOIN(StaticAssertTypedef, __LINE__) RAPIDJSON_STATIC_ASSERT_UNUSED_ATTRIBUTE +#endif + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_LIKELY, RAPIDJSON_UNLIKELY + +//! Compiler branching hint for expression with high probability to be true. +/*! + \ingroup RAPIDJSON_CONFIG + \param x Boolean expression likely to be true. +*/ +#ifndef RAPIDJSON_LIKELY +#if defined(__GNUC__) || defined(__clang__) +#define RAPIDJSON_LIKELY(x) __builtin_expect(!!(x), 1) +#else +#define RAPIDJSON_LIKELY(x) (x) +#endif +#endif + +//! Compiler branching hint for expression with low probability to be true. +/*! + \ingroup RAPIDJSON_CONFIG + \param x Boolean expression unlikely to be true. 
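+
+    A small sketch of the intended use (the variable is illustrative):
+    \code
+    if (RAPIDJSON_UNLIKELY(len == 0))
+        return false;   // cold path, hinted as improbable
+    \endcode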
+*/ +#ifndef RAPIDJSON_UNLIKELY +#if defined(__GNUC__) || defined(__clang__) +#define RAPIDJSON_UNLIKELY(x) __builtin_expect(!!(x), 0) +#else +#define RAPIDJSON_UNLIKELY(x) (x) +#endif +#endif + +/////////////////////////////////////////////////////////////////////////////// +// Helpers + +//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN + +#define RAPIDJSON_MULTILINEMACRO_BEGIN do { +#define RAPIDJSON_MULTILINEMACRO_END \ +} while((void)0, 0) + +// adopted from Boost +#define RAPIDJSON_VERSION_CODE(x,y,z) \ + (((x)*100000) + ((y)*100) + (z)) + +/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_DIAG_PUSH/POP, RAPIDJSON_DIAG_OFF + +#if defined(__GNUC__) +#define RAPIDJSON_GNUC \ + RAPIDJSON_VERSION_CODE(__GNUC__,__GNUC_MINOR__,__GNUC_PATCHLEVEL__) +#endif + +#if defined(__clang__) || (defined(RAPIDJSON_GNUC) && RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,2,0)) + +#define RAPIDJSON_PRAGMA(x) _Pragma(RAPIDJSON_STRINGIFY(x)) +#define RAPIDJSON_DIAG_PRAGMA(x) RAPIDJSON_PRAGMA(GCC diagnostic x) +#define RAPIDJSON_DIAG_OFF(x) \ + RAPIDJSON_DIAG_PRAGMA(ignored RAPIDJSON_STRINGIFY(RAPIDJSON_JOIN(-W,x))) + +// push/pop support in Clang and GCC>=4.6 +#if defined(__clang__) || (defined(RAPIDJSON_GNUC) && RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,6,0)) +#define RAPIDJSON_DIAG_PUSH RAPIDJSON_DIAG_PRAGMA(push) +#define RAPIDJSON_DIAG_POP RAPIDJSON_DIAG_PRAGMA(pop) +#else // GCC >= 4.2, < 4.6 +#define RAPIDJSON_DIAG_PUSH /* ignored */ +#define RAPIDJSON_DIAG_POP /* ignored */ +#endif + +#elif defined(_MSC_VER) + +// pragma (MSVC specific) +#define RAPIDJSON_PRAGMA(x) __pragma(x) +#define RAPIDJSON_DIAG_PRAGMA(x) RAPIDJSON_PRAGMA(warning(x)) + +#define RAPIDJSON_DIAG_OFF(x) RAPIDJSON_DIAG_PRAGMA(disable: x) +#define RAPIDJSON_DIAG_PUSH RAPIDJSON_DIAG_PRAGMA(push) +#define RAPIDJSON_DIAG_POP RAPIDJSON_DIAG_PRAGMA(pop) + +#else + +#define RAPIDJSON_DIAG_OFF(x) /* ignored */ +#define RAPIDJSON_DIAG_PUSH /* ignored */ +#define RAPIDJSON_DIAG_POP /* ignored */ + +#endif // RAPIDJSON_DIAG_* + +/////////////////////////////////////////////////////////////////////////////// +// C++11 features + +#ifndef RAPIDJSON_HAS_CXX11_RVALUE_REFS +#if defined(__clang__) +#if __has_feature(cxx_rvalue_references) && \ + (defined(_LIBCPP_VERSION) || defined(__GLIBCXX__) && __GLIBCXX__ >= 20080306) +#define RAPIDJSON_HAS_CXX11_RVALUE_REFS 1 +#else +#define RAPIDJSON_HAS_CXX11_RVALUE_REFS 0 +#endif +#elif (defined(RAPIDJSON_GNUC) && (RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,3,0)) && defined(__GXX_EXPERIMENTAL_CXX0X__)) || \ + (defined(_MSC_VER) && _MSC_VER >= 1600) + +#define RAPIDJSON_HAS_CXX11_RVALUE_REFS 1 +#else +#define RAPIDJSON_HAS_CXX11_RVALUE_REFS 0 +#endif +#endif // RAPIDJSON_HAS_CXX11_RVALUE_REFS + +#ifndef RAPIDJSON_HAS_CXX11_NOEXCEPT +#if defined(__clang__) +#define RAPIDJSON_HAS_CXX11_NOEXCEPT __has_feature(cxx_noexcept) +#elif (defined(RAPIDJSON_GNUC) && (RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,6,0)) && defined(__GXX_EXPERIMENTAL_CXX0X__)) +// (defined(_MSC_VER) && _MSC_VER >= ????) 
// not yet supported +#define RAPIDJSON_HAS_CXX11_NOEXCEPT 1 +#else +#define RAPIDJSON_HAS_CXX11_NOEXCEPT 0 +#endif +#endif +#if RAPIDJSON_HAS_CXX11_NOEXCEPT +#define RAPIDJSON_NOEXCEPT noexcept +#else +#define RAPIDJSON_NOEXCEPT /* noexcept */ +#endif // RAPIDJSON_HAS_CXX11_NOEXCEPT + +// no automatic detection, yet +#ifndef RAPIDJSON_HAS_CXX11_TYPETRAITS +#define RAPIDJSON_HAS_CXX11_TYPETRAITS 0 +#endif + +#ifndef RAPIDJSON_HAS_CXX11_RANGE_FOR +#if defined(__clang__) +#define RAPIDJSON_HAS_CXX11_RANGE_FOR __has_feature(cxx_range_for) +#elif (defined(RAPIDJSON_GNUC) && (RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,3,0)) && defined(__GXX_EXPERIMENTAL_CXX0X__)) || \ + (defined(_MSC_VER) && _MSC_VER >= 1700) +#define RAPIDJSON_HAS_CXX11_RANGE_FOR 1 +#else +#define RAPIDJSON_HAS_CXX11_RANGE_FOR 0 +#endif +#endif // RAPIDJSON_HAS_CXX11_RANGE_FOR + +//!@endcond + +/////////////////////////////////////////////////////////////////////////////// +// new/delete + +#ifndef RAPIDJSON_NEW +///! customization point for global \c new +#define RAPIDJSON_NEW(x) new x +#endif +#ifndef RAPIDJSON_DELETE +///! customization point for global \c delete +#define RAPIDJSON_DELETE(x) delete x +#endif + +/////////////////////////////////////////////////////////////////////////////// +// Type + +/*! \namespace rapidjson + \brief main RapidJSON namespace + \see RAPIDJSON_NAMESPACE +*/ +RAPIDJSON_NAMESPACE_BEGIN + +//! Type of JSON value +enum Type { + kNullType = 0, //!< null + kFalseType = 1, //!< false + kTrueType = 2, //!< true + kObjectType = 3, //!< object + kArrayType = 4, //!< array + kStringType = 5, //!< string + kNumberType = 6 //!< number +}; + +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_RAPIDJSON_H_ diff --git a/include/rapidjson/reader.h b/include/rapidjson/reader.h new file mode 100644 index 0000000..19f8849 --- /dev/null +++ b/include/rapidjson/reader.h @@ -0,0 +1,1879 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_READER_H_ +#define RAPIDJSON_READER_H_ + +/*! 
\file reader.h */ + +#include "allocators.h" +#include "stream.h" +#include "encodedstream.h" +#include "internal/meta.h" +#include "internal/stack.h" +#include "internal/strtod.h" +#include + +#if defined(RAPIDJSON_SIMD) && defined(_MSC_VER) +#include +#pragma intrinsic(_BitScanForward) +#endif +#ifdef RAPIDJSON_SSE42 +#include +#elif defined(RAPIDJSON_SSE2) +#include +#endif + +#ifdef _MSC_VER +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(4127) // conditional expression is constant +RAPIDJSON_DIAG_OFF(4702) // unreachable code +#endif + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(old-style-cast) +RAPIDJSON_DIAG_OFF(padded) +RAPIDJSON_DIAG_OFF(switch-enum) +#endif + +#ifdef __GNUC__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(effc++) +#endif + +//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN +#define RAPIDJSON_NOTHING /* deliberately empty */ +#ifndef RAPIDJSON_PARSE_ERROR_EARLY_RETURN +#define RAPIDJSON_PARSE_ERROR_EARLY_RETURN(value) \ + RAPIDJSON_MULTILINEMACRO_BEGIN \ + if (RAPIDJSON_UNLIKELY(HasParseError())) { return value; } \ + RAPIDJSON_MULTILINEMACRO_END +#endif +#define RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID \ + RAPIDJSON_PARSE_ERROR_EARLY_RETURN(RAPIDJSON_NOTHING) +//!@endcond + +/*! \def RAPIDJSON_PARSE_ERROR_NORETURN + \ingroup RAPIDJSON_ERRORS + \brief Macro to indicate a parse error. + \param parseErrorCode \ref rapidjson::ParseErrorCode of the error + \param offset position of the error in JSON input (\c size_t) + + This macros can be used as a customization point for the internal + error handling mechanism of RapidJSON. + + A common usage model is to throw an exception instead of requiring the + caller to explicitly check the \ref rapidjson::GenericReader::Parse's + return value: + + \code + #define RAPIDJSON_PARSE_ERROR_NORETURN(parseErrorCode,offset) \ + throw ParseException(parseErrorCode, #parseErrorCode, offset) + + #include // std::runtime_error + #include "rapidjson/error/error.h" // rapidjson::ParseResult + + struct ParseException : std::runtime_error, rapidjson::ParseResult { + ParseException(rapidjson::ParseErrorCode code, const char* msg, size_t offset) + : std::runtime_error(msg), ParseResult(code, offset) {} + }; + + #include "rapidjson/reader.h" + \endcode + + \see RAPIDJSON_PARSE_ERROR, rapidjson::GenericReader::Parse + */ +#ifndef RAPIDJSON_PARSE_ERROR_NORETURN +#define RAPIDJSON_PARSE_ERROR_NORETURN(parseErrorCode, offset) \ + RAPIDJSON_MULTILINEMACRO_BEGIN \ + RAPIDJSON_ASSERT(!HasParseError()); /* Error can only be assigned once */ \ + SetParseError(parseErrorCode, offset); \ + RAPIDJSON_MULTILINEMACRO_END +#endif + +/*! \def RAPIDJSON_PARSE_ERROR + \ingroup RAPIDJSON_ERRORS + \brief (Internal) macro to indicate and handle a parse error. + \param parseErrorCode \ref rapidjson::ParseErrorCode of the error + \param offset position of the error in JSON input (\c size_t) + + Invokes RAPIDJSON_PARSE_ERROR_NORETURN and stops the parsing. + + \see RAPIDJSON_PARSE_ERROR_NORETURN + \hideinitializer + */ +#ifndef RAPIDJSON_PARSE_ERROR +#define RAPIDJSON_PARSE_ERROR(parseErrorCode, offset) \ + RAPIDJSON_MULTILINEMACRO_BEGIN \ + RAPIDJSON_PARSE_ERROR_NORETURN(parseErrorCode, offset); \ + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; \ + RAPIDJSON_MULTILINEMACRO_END +#endif + +#include "error/error.h" // ParseErrorCode, ParseResult + +RAPIDJSON_NAMESPACE_BEGIN + +/////////////////////////////////////////////////////////////////////////////// +// ParseFlag + +/*! 
\def RAPIDJSON_PARSE_DEFAULT_FLAGS + \ingroup RAPIDJSON_CONFIG + \brief User-defined kParseDefaultFlags definition. + + User can define this as any \c ParseFlag combinations. +*/ +#ifndef RAPIDJSON_PARSE_DEFAULT_FLAGS +#define RAPIDJSON_PARSE_DEFAULT_FLAGS kParseNoFlags +#endif + +//! Combination of parseFlags +/*! \see Reader::Parse, Document::Parse, Document::ParseInsitu, Document::ParseStream + */ +enum ParseFlag { + kParseNoFlags = 0, //!< No flags are set. + kParseInsituFlag = 1, //!< In-situ(destructive) parsing. + kParseValidateEncodingFlag = 2, //!< Validate encoding of JSON strings. + kParseIterativeFlag = 4, //!< Iterative(constant complexity in terms of function call stack size) parsing. + kParseStopWhenDoneFlag = 8, //!< After parsing a complete JSON root from stream, stop further processing the rest of stream. When this flag is used, parser will not generate kParseErrorDocumentRootNotSingular error. + kParseFullPrecisionFlag = 16, //!< Parse number in full precision (but slower). + kParseCommentsFlag = 32, //!< Allow one-line (//) and multi-line (/**/) comments. + kParseNumbersAsStringsFlag = 64, //!< Parse all numbers (ints/doubles) as strings. + kParseTrailingCommasFlag = 128, //!< Allow trailing commas at the end of objects and arrays. + kParseNanAndInfFlag = 256, //!< Allow parsing NaN, Inf, Infinity, -Inf and -Infinity as doubles. + kParseDefaultFlags = RAPIDJSON_PARSE_DEFAULT_FLAGS //!< Default parse flags. Can be customized by defining RAPIDJSON_PARSE_DEFAULT_FLAGS +}; + +/////////////////////////////////////////////////////////////////////////////// +// Handler + +/*! \class rapidjson::Handler + \brief Concept for receiving events from GenericReader upon parsing. + The functions return true if no error occurs. If they return false, + the event publisher should terminate the process. +\code +concept Handler { + typename Ch; + + bool Null(); + bool Bool(bool b); + bool Int(int i); + bool Uint(unsigned i); + bool Int64(int64_t i); + bool Uint64(uint64_t i); + bool Double(double d); + /// enabled via kParseNumbersAsStringsFlag, string is not null-terminated (use length) + bool RawNumber(const Ch* str, SizeType length, bool copy); + bool String(const Ch* str, SizeType length, bool copy); + bool StartObject(); + bool Key(const Ch* str, SizeType length, bool copy); + bool EndObject(SizeType memberCount); + bool StartArray(); + bool EndArray(SizeType elementCount); +}; +\endcode +*/ +/////////////////////////////////////////////////////////////////////////////// +// BaseReaderHandler + +//! Default implementation of Handler. +/*! This can be used as base class of any reader handler. 
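+
+    A minimal sketch of a derived handler (the handler name and JSON text are
+    illustrative; \c Reader and \c StringStream are the types declared later in
+    this header and in stream.h):
+    \code
+    struct KeyCounter : public BaseReaderHandler<UTF8<>, KeyCounter> {
+        KeyCounter() : count(0) {}
+        bool Key(const Ch* str, SizeType len, bool copy) {
+            (void)str; (void)len; (void)copy;
+            ++count;
+            return true;
+        }
+        SizeType count;
+    };
+
+    Reader reader;
+    StringStream ss("{\"a\":1,\"b\":2}");
+    KeyCounter handler;
+    reader.Parse(ss, handler);   // handler.count == 2
+    \endcode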
+ \note implements Handler concept +*/ +template, typename Derived = void> +struct BaseReaderHandler { + typedef typename Encoding::Ch Ch; + + typedef typename internal::SelectIf, BaseReaderHandler, Derived>::Type Override; + + bool Default() { return true; } + bool Null() { return static_cast(*this).Default(); } + bool Bool(bool) { return static_cast(*this).Default(); } + bool Int(int) { return static_cast(*this).Default(); } + bool Uint(unsigned) { return static_cast(*this).Default(); } + bool Int64(int64_t) { return static_cast(*this).Default(); } + bool Uint64(uint64_t) { return static_cast(*this).Default(); } + bool Double(double) { return static_cast(*this).Default(); } + /// enabled via kParseNumbersAsStringsFlag, string is not null-terminated (use length) + bool RawNumber(const Ch* str, SizeType len, bool copy) { return static_cast(*this).String(str, len, copy); } + bool String(const Ch*, SizeType, bool) { return static_cast(*this).Default(); } + bool StartObject() { return static_cast(*this).Default(); } + bool Key(const Ch* str, SizeType len, bool copy) { return static_cast(*this).String(str, len, copy); } + bool EndObject(SizeType) { return static_cast(*this).Default(); } + bool StartArray() { return static_cast(*this).Default(); } + bool EndArray(SizeType) { return static_cast(*this).Default(); } +}; + +/////////////////////////////////////////////////////////////////////////////// +// StreamLocalCopy + +namespace internal { + +template::copyOptimization> +class StreamLocalCopy; + +//! Do copy optimization. +template +class StreamLocalCopy { +public: + StreamLocalCopy(Stream& original) : s(original), original_(original) {} + ~StreamLocalCopy() { original_ = s; } + + Stream s; + +private: + StreamLocalCopy& operator=(const StreamLocalCopy&) /* = delete */; + + Stream& original_; +}; + +//! Keep reference. +template +class StreamLocalCopy { +public: + StreamLocalCopy(Stream& original) : s(original) {} + + Stream& s; + +private: + StreamLocalCopy& operator=(const StreamLocalCopy&) /* = delete */; +}; + +} // namespace internal + +/////////////////////////////////////////////////////////////////////////////// +// SkipWhitespace + +//! Skip the JSON white spaces in a stream. +/*! \param is A input stream for skipping white spaces. + \note This function has SSE2/SSE4.2 specialization. +*/ +template +void SkipWhitespace(InputStream& is) { + internal::StreamLocalCopy copy(is); + InputStream& s(copy.s); + + typename InputStream::Ch c; + while ((c = s.Peek()) == ' ' || c == '\n' || c == '\r' || c == '\t') + s.Take(); +} + +inline const char* SkipWhitespace(const char* p, const char* end) { + while (p != end && (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t')) + ++p; + return p; +} + +#ifdef RAPIDJSON_SSE42 +//! Skip whitespace with SSE 4.2 pcmpistrm instruction, testing 16 8-byte characters at once. 
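A minimal sketch of the Handler/BaseReaderHandler pattern documented above (the handler name and JSON text are illustrative, not part of this header). The CRTP Override typedef lets a derived handler implement only the events it needs, with everything else falling back to Default(); parse flags are combined with bitwise or.

#include "rapidjson/reader.h"
#include <cstdio>
using namespace rapidjson;

// Counts object keys; every other event falls through to Default() via the CRTP Override.
struct KeyCounter : BaseReaderHandler<UTF8<>, KeyCounter> {
    size_t keys;
    KeyCounter() : keys(0) {}
    bool Key(const Ch*, SizeType, bool) { ++keys; return true; }
};

int main() {
    KeyCounter handler;
    Reader reader;
    StringStream ss("{\"a\":1, /* comment */ \"b\":[true,null,],}");
    if (reader.Parse<kParseCommentsFlag | kParseTrailingCommasFlag>(ss, handler))
        std::printf("keys: %u\n", static_cast<unsigned>(handler.keys));   // prints "keys: 2"
    return 0;
}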
+inline const char *SkipWhitespace_SIMD(const char* p) { + // Fast return for single non-whitespace + if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t') + ++p; + else + return p; + + // 16-byte align to the next boundary + const char* nextAligned = reinterpret_cast((reinterpret_cast(p) + 15) & static_cast(~15)); + while (p != nextAligned) + if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t') + ++p; + else + return p; + + // The rest of string using SIMD + static const char whitespace[16] = " \n\r\t"; + const __m128i w = _mm_loadu_si128(reinterpret_cast(&whitespace[0])); + + for (;; p += 16) { + const __m128i s = _mm_load_si128(reinterpret_cast(p)); + const int r = _mm_cvtsi128_si32(_mm_cmpistrm(w, s, _SIDD_UBYTE_OPS | _SIDD_CMP_EQUAL_ANY | _SIDD_BIT_MASK | _SIDD_NEGATIVE_POLARITY)); + if (r != 0) { // some of characters is non-whitespace +#ifdef _MSC_VER // Find the index of first non-whitespace + unsigned long offset; + _BitScanForward(&offset, r); + return p + offset; +#else + return p + __builtin_ffs(r) - 1; +#endif + } + } +} + +inline const char *SkipWhitespace_SIMD(const char* p, const char* end) { + // Fast return for single non-whitespace + if (p != end && (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t')) + ++p; + else + return p; + + // The middle of string using SIMD + static const char whitespace[16] = " \n\r\t"; + const __m128i w = _mm_loadu_si128(reinterpret_cast(&whitespace[0])); + + for (; p <= end - 16; p += 16) { + const __m128i s = _mm_loadu_si128(reinterpret_cast(p)); + const int r = _mm_cvtsi128_si32(_mm_cmpistrm(w, s, _SIDD_UBYTE_OPS | _SIDD_CMP_EQUAL_ANY | _SIDD_BIT_MASK | _SIDD_NEGATIVE_POLARITY)); + if (r != 0) { // some of characters is non-whitespace +#ifdef _MSC_VER // Find the index of first non-whitespace + unsigned long offset; + _BitScanForward(&offset, r); + return p + offset; +#else + return p + __builtin_ffs(r) - 1; +#endif + } + } + + return SkipWhitespace(p, end); +} + +#elif defined(RAPIDJSON_SSE2) + +//! Skip whitespace with SSE2 instructions, testing 16 8-byte characters at once. 
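A standalone sketch of the core idea behind these SIMD variants: classify 16 bytes at once against the four whitespace characters, collapse the comparisons into a 16-bit mask, and bit-scan for the first byte that is not whitespace. It mirrors the non-MSVC branch (__builtin_ffs); the function and buffer are illustrative, not the header's own code.

#include <emmintrin.h>   // SSE2
#include <cstdio>

// Returns the index of the first non-whitespace byte in a 16-byte block, or 16 if all
// sixteen bytes are whitespace. The caller must guarantee 16 readable bytes at p.
static int FirstNonWhitespace16(const char* p) {
    const __m128i s = _mm_loadu_si128(reinterpret_cast<const __m128i*>(p));
    __m128i ws = _mm_cmpeq_epi8(s, _mm_set1_epi8(' '));
    ws = _mm_or_si128(ws, _mm_cmpeq_epi8(s, _mm_set1_epi8('\n')));
    ws = _mm_or_si128(ws, _mm_cmpeq_epi8(s, _mm_set1_epi8('\r')));
    ws = _mm_or_si128(ws, _mm_cmpeq_epi8(s, _mm_set1_epi8('\t')));
    const unsigned short r = static_cast<unsigned short>(~_mm_movemask_epi8(ws)); // bit set => not whitespace
    return r ? __builtin_ffs(r) - 1 : 16;
}

int main() {
    const char buf[17] = "  \t\n  {\"a\":1}   ";
    std::printf("first non-whitespace at index %d\n", FirstNonWhitespace16(buf)); // prints 6
    return 0;
}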
+inline const char *SkipWhitespace_SIMD(const char* p) { + // Fast return for single non-whitespace + if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t') + ++p; + else + return p; + + // 16-byte align to the next boundary + const char* nextAligned = reinterpret_cast((reinterpret_cast(p) + 15) & static_cast(~15)); + while (p != nextAligned) + if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t') + ++p; + else + return p; + + // The rest of string + #define C16(c) { c, c, c, c, c, c, c, c, c, c, c, c, c, c, c, c } + static const char whitespaces[4][16] = { C16(' '), C16('\n'), C16('\r'), C16('\t') }; + #undef C16 + + const __m128i w0 = _mm_loadu_si128(reinterpret_cast(&whitespaces[0][0])); + const __m128i w1 = _mm_loadu_si128(reinterpret_cast(&whitespaces[1][0])); + const __m128i w2 = _mm_loadu_si128(reinterpret_cast(&whitespaces[2][0])); + const __m128i w3 = _mm_loadu_si128(reinterpret_cast(&whitespaces[3][0])); + + for (;; p += 16) { + const __m128i s = _mm_load_si128(reinterpret_cast(p)); + __m128i x = _mm_cmpeq_epi8(s, w0); + x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w1)); + x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w2)); + x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w3)); + unsigned short r = static_cast(~_mm_movemask_epi8(x)); + if (r != 0) { // some of characters may be non-whitespace +#ifdef _MSC_VER // Find the index of first non-whitespace + unsigned long offset; + _BitScanForward(&offset, r); + return p + offset; +#else + return p + __builtin_ffs(r) - 1; +#endif + } + } +} + +inline const char *SkipWhitespace_SIMD(const char* p, const char* end) { + // Fast return for single non-whitespace + if (p != end && (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t')) + ++p; + else + return p; + + // The rest of string + #define C16(c) { c, c, c, c, c, c, c, c, c, c, c, c, c, c, c, c } + static const char whitespaces[4][16] = { C16(' '), C16('\n'), C16('\r'), C16('\t') }; + #undef C16 + + const __m128i w0 = _mm_loadu_si128(reinterpret_cast(&whitespaces[0][0])); + const __m128i w1 = _mm_loadu_si128(reinterpret_cast(&whitespaces[1][0])); + const __m128i w2 = _mm_loadu_si128(reinterpret_cast(&whitespaces[2][0])); + const __m128i w3 = _mm_loadu_si128(reinterpret_cast(&whitespaces[3][0])); + + for (; p <= end - 16; p += 16) { + const __m128i s = _mm_loadu_si128(reinterpret_cast(p)); + __m128i x = _mm_cmpeq_epi8(s, w0); + x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w1)); + x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w2)); + x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w3)); + unsigned short r = static_cast(~_mm_movemask_epi8(x)); + if (r != 0) { // some of characters may be non-whitespace +#ifdef _MSC_VER // Find the index of first non-whitespace + unsigned long offset; + _BitScanForward(&offset, r); + return p + offset; +#else + return p + __builtin_ffs(r) - 1; +#endif + } + } + + return SkipWhitespace(p, end); +} + +#endif // RAPIDJSON_SSE2 + +#ifdef RAPIDJSON_SIMD +//! Template function specialization for InsituStringStream +template<> inline void SkipWhitespace(InsituStringStream& is) { + is.src_ = const_cast(SkipWhitespace_SIMD(is.src_)); +} + +//! Template function specialization for StringStream +template<> inline void SkipWhitespace(StringStream& is) { + is.src_ = SkipWhitespace_SIMD(is.src_); +} + +template<> inline void SkipWhitespace(EncodedInputStream, MemoryStream>& is) { + is.is_.src_ = SkipWhitespace_SIMD(is.is_.src_, is.is_.end_); +} +#endif // RAPIDJSON_SIMD + +/////////////////////////////////////////////////////////////////////////////// +// GenericReader + +//! SAX-style JSON parser. 
Use \ref Reader for UTF8 encoding and default allocator. +/*! GenericReader parses JSON text from a stream, and send events synchronously to an + object implementing Handler concept. + + It needs to allocate a stack for storing a single decoded string during + non-destructive parsing. + + For in-situ parsing, the decoded string is directly written to the source + text string, no temporary buffer is required. + + A GenericReader object can be reused for parsing multiple JSON text. + + \tparam SourceEncoding Encoding of the input stream. + \tparam TargetEncoding Encoding of the parse output. + \tparam StackAllocator Allocator type for stack. +*/ +template +class GenericReader { +public: + typedef typename SourceEncoding::Ch Ch; //!< SourceEncoding character type + + //! Constructor. + /*! \param stackAllocator Optional allocator for allocating stack memory. (Only use for non-destructive parsing) + \param stackCapacity stack capacity in bytes for storing a single decoded string. (Only use for non-destructive parsing) + */ + GenericReader(StackAllocator* stackAllocator = 0, size_t stackCapacity = kDefaultStackCapacity) : stack_(stackAllocator, stackCapacity), parseResult_() {} + + //! Parse JSON text. + /*! \tparam parseFlags Combination of \ref ParseFlag. + \tparam InputStream Type of input stream, implementing Stream concept. + \tparam Handler Type of handler, implementing Handler concept. + \param is Input stream to be parsed. + \param handler The handler to receive events. + \return Whether the parsing is successful. + */ + template + ParseResult Parse(InputStream& is, Handler& handler) { + if (parseFlags & kParseIterativeFlag) + return IterativeParse(is, handler); + + parseResult_.Clear(); + + ClearStackOnExit scope(*this); + + SkipWhitespaceAndComments(is); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_); + + if (RAPIDJSON_UNLIKELY(is.Peek() == '\0')) { + RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorDocumentEmpty, is.Tell()); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_); + } + else { + ParseValue(is, handler); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_); + + if (!(parseFlags & kParseStopWhenDoneFlag)) { + SkipWhitespaceAndComments(is); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_); + + if (RAPIDJSON_UNLIKELY(is.Peek() != '\0')) { + RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorDocumentRootNotSingular, is.Tell()); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_); + } + } + } + + return parseResult_; + } + + //! Parse JSON text (with \ref kParseDefaultFlags) + /*! \tparam InputStream Type of input stream, implementing Stream concept + \tparam Handler Type of handler, implementing Handler concept. + \param is Input stream to be parsed. + \param handler The handler to receive events. + \return Whether the parsing is successful. + */ + template + ParseResult Parse(InputStream& is, Handler& handler) { + return Parse(is, handler); + } + + //! Whether a parse error has occured in the last parsing. + bool HasParseError() const { return parseResult_.IsError(); } + + //! Get the \ref ParseErrorCode of last parsing. + ParseErrorCode GetParseErrorCode() const { return parseResult_.Code(); } + + //! Get the position of last parsing error in input, 0 otherwise. + size_t GetErrorOffset() const { return parseResult_.Offset(); } + +protected: + void SetParseError(ParseErrorCode code, size_t offset) { parseResult_.Set(code, offset); } + +private: + // Prohibit copy constructor & assignment operator. 
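A short error-reporting sketch for the accessors above (HasParseError, GetParseErrorCode, GetErrorOffset), assuming the human-readable message strings from rapidjson/error/en.h; the truncated JSON text is illustrative.

#include "rapidjson/reader.h"
#include "rapidjson/error/en.h"   // GetParseError_En()
#include <cstdio>
using namespace rapidjson;

int main() {
    BaseReaderHandler<> handler;              // accepts every event
    Reader reader;
    StringStream ss("{\"a\":1");              // deliberately truncated input
    reader.Parse(ss, handler);
    if (reader.HasParseError())
        std::printf("%s at offset %u\n",
                    GetParseError_En(reader.GetParseErrorCode()),
                    static_cast<unsigned>(reader.GetErrorOffset()));
    return 0;
}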
+ GenericReader(const GenericReader&); + GenericReader& operator=(const GenericReader&); + + void ClearStack() { stack_.Clear(); } + + // clear stack on any exit from ParseStream, e.g. due to exception + struct ClearStackOnExit { + explicit ClearStackOnExit(GenericReader& r) : r_(r) {} + ~ClearStackOnExit() { r_.ClearStack(); } + private: + GenericReader& r_; + ClearStackOnExit(const ClearStackOnExit&); + ClearStackOnExit& operator=(const ClearStackOnExit&); + }; + + template + void SkipWhitespaceAndComments(InputStream& is) { + SkipWhitespace(is); + + if (parseFlags & kParseCommentsFlag) { + while (RAPIDJSON_UNLIKELY(Consume(is, '/'))) { + if (Consume(is, '*')) { + while (true) { + if (RAPIDJSON_UNLIKELY(is.Peek() == '\0')) + RAPIDJSON_PARSE_ERROR(kParseErrorUnspecificSyntaxError, is.Tell()); + else if (Consume(is, '*')) { + if (Consume(is, '/')) + break; + } + else + is.Take(); + } + } + else if (RAPIDJSON_LIKELY(Consume(is, '/'))) + while (is.Peek() != '\0' && is.Take() != '\n'); + else + RAPIDJSON_PARSE_ERROR(kParseErrorUnspecificSyntaxError, is.Tell()); + + SkipWhitespace(is); + } + } + } + + // Parse object: { string : value, ... } + template + void ParseObject(InputStream& is, Handler& handler) { + RAPIDJSON_ASSERT(is.Peek() == '{'); + is.Take(); // Skip '{' + + if (RAPIDJSON_UNLIKELY(!handler.StartObject())) + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); + + SkipWhitespaceAndComments(is); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + + if (Consume(is, '}')) { + if (RAPIDJSON_UNLIKELY(!handler.EndObject(0))) // empty object + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); + return; + } + + for (SizeType memberCount = 0;;) { + if (RAPIDJSON_UNLIKELY(is.Peek() != '"')) + RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissName, is.Tell()); + + ParseString(is, handler, true); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + + SkipWhitespaceAndComments(is); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + + if (RAPIDJSON_UNLIKELY(!Consume(is, ':'))) + RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissColon, is.Tell()); + + SkipWhitespaceAndComments(is); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + + ParseValue(is, handler); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + + SkipWhitespaceAndComments(is); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + + ++memberCount; + + switch (is.Peek()) { + case ',': + is.Take(); + SkipWhitespaceAndComments(is); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + break; + case '}': + is.Take(); + if (RAPIDJSON_UNLIKELY(!handler.EndObject(memberCount))) + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); + return; + default: + RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissCommaOrCurlyBracket, is.Tell()); break; // This useless break is only for making warning and coverage happy + } + + if (parseFlags & kParseTrailingCommasFlag) { + if (is.Peek() == '}') { + if (RAPIDJSON_UNLIKELY(!handler.EndObject(memberCount))) + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); + is.Take(); + return; + } + } + } + } + + // Parse array: [ value, ... 
] + template + void ParseArray(InputStream& is, Handler& handler) { + RAPIDJSON_ASSERT(is.Peek() == '['); + is.Take(); // Skip '[' + + if (RAPIDJSON_UNLIKELY(!handler.StartArray())) + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); + + SkipWhitespaceAndComments(is); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + + if (Consume(is, ']')) { + if (RAPIDJSON_UNLIKELY(!handler.EndArray(0))) // empty array + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); + return; + } + + for (SizeType elementCount = 0;;) { + ParseValue(is, handler); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + + ++elementCount; + SkipWhitespaceAndComments(is); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + + if (Consume(is, ',')) { + SkipWhitespaceAndComments(is); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + } + else if (Consume(is, ']')) { + if (RAPIDJSON_UNLIKELY(!handler.EndArray(elementCount))) + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); + return; + } + else + RAPIDJSON_PARSE_ERROR(kParseErrorArrayMissCommaOrSquareBracket, is.Tell()); + + if (parseFlags & kParseTrailingCommasFlag) { + if (is.Peek() == ']') { + if (RAPIDJSON_UNLIKELY(!handler.EndArray(elementCount))) + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); + is.Take(); + return; + } + } + } + } + + template + void ParseNull(InputStream& is, Handler& handler) { + RAPIDJSON_ASSERT(is.Peek() == 'n'); + is.Take(); + + if (RAPIDJSON_LIKELY(Consume(is, 'u') && Consume(is, 'l') && Consume(is, 'l'))) { + if (RAPIDJSON_UNLIKELY(!handler.Null())) + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); + } + else + RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, is.Tell()); + } + + template + void ParseTrue(InputStream& is, Handler& handler) { + RAPIDJSON_ASSERT(is.Peek() == 't'); + is.Take(); + + if (RAPIDJSON_LIKELY(Consume(is, 'r') && Consume(is, 'u') && Consume(is, 'e'))) { + if (RAPIDJSON_UNLIKELY(!handler.Bool(true))) + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); + } + else + RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, is.Tell()); + } + + template + void ParseFalse(InputStream& is, Handler& handler) { + RAPIDJSON_ASSERT(is.Peek() == 'f'); + is.Take(); + + if (RAPIDJSON_LIKELY(Consume(is, 'a') && Consume(is, 'l') && Consume(is, 's') && Consume(is, 'e'))) { + if (RAPIDJSON_UNLIKELY(!handler.Bool(false))) + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); + } + else + RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, is.Tell()); + } + + template + RAPIDJSON_FORCEINLINE static bool Consume(InputStream& is, typename InputStream::Ch expect) { + if (RAPIDJSON_LIKELY(is.Peek() == expect)) { + is.Take(); + return true; + } + else + return false; + } + + // Helper function to parse four hexidecimal digits in \uXXXX in ParseString(). 
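Before the helper itself, a standalone sketch of what ParseHex4 and the surrogate handling in ParseString compute: four hex digits form one 16-bit code unit, and a high/low surrogate pair combines into a single code point. The names and the sample escape sequence are illustrative.

#include <cstdio>

// Four hex digits -> one 16-bit code unit (assumes the digits were already validated).
static unsigned Hex4(const char* p) {
    unsigned cp = 0;
    for (int i = 0; i < 4; i++) {
        const char c = p[i];
        cp <<= 4;
        if (c >= '0' && c <= '9')      cp += static_cast<unsigned>(c - '0');
        else if (c >= 'A' && c <= 'F') cp += static_cast<unsigned>(c - 'A' + 10);
        else                           cp += static_cast<unsigned>(c - 'a' + 10);
    }
    return cp;
}

int main() {
    // "\uD83D\uDE00" is U+1F600 encoded as a UTF-16 surrogate pair.
    const unsigned hi = Hex4("D83D"), lo = Hex4("DE00");
    const unsigned codepoint = (((hi - 0xD800) << 10) | (lo - 0xDC00)) + 0x10000;
    std::printf("U+%X\n", codepoint);   // prints U+1F600
    return 0;
}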
+ template + unsigned ParseHex4(InputStream& is, size_t escapeOffset) { + unsigned codepoint = 0; + for (int i = 0; i < 4; i++) { + Ch c = is.Peek(); + codepoint <<= 4; + codepoint += static_cast(c); + if (c >= '0' && c <= '9') + codepoint -= '0'; + else if (c >= 'A' && c <= 'F') + codepoint -= 'A' - 10; + else if (c >= 'a' && c <= 'f') + codepoint -= 'a' - 10; + else { + RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorStringUnicodeEscapeInvalidHex, escapeOffset); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN(0); + } + is.Take(); + } + return codepoint; + } + + template + class StackStream { + public: + typedef CharType Ch; + + StackStream(internal::Stack& stack) : stack_(stack), length_(0) {} + RAPIDJSON_FORCEINLINE void Put(Ch c) { + *stack_.template Push() = c; + ++length_; + } + + RAPIDJSON_FORCEINLINE void* Push(SizeType count) { + length_ += count; + return stack_.template Push(count); + } + + size_t Length() const { return length_; } + + Ch* Pop() { + return stack_.template Pop(length_); + } + + private: + StackStream(const StackStream&); + StackStream& operator=(const StackStream&); + + internal::Stack& stack_; + SizeType length_; + }; + + // Parse string and generate String event. Different code paths for kParseInsituFlag. + template + void ParseString(InputStream& is, Handler& handler, bool isKey = false) { + internal::StreamLocalCopy copy(is); + InputStream& s(copy.s); + + RAPIDJSON_ASSERT(s.Peek() == '\"'); + s.Take(); // Skip '\"' + + bool success = false; + if (parseFlags & kParseInsituFlag) { + typename InputStream::Ch *head = s.PutBegin(); + ParseStringToStream(s, s); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + size_t length = s.PutEnd(head) - 1; + RAPIDJSON_ASSERT(length <= 0xFFFFFFFF); + const typename TargetEncoding::Ch* const str = reinterpret_cast(head); + success = (isKey ? handler.Key(str, SizeType(length), false) : handler.String(str, SizeType(length), false)); + } + else { + StackStream stackStream(stack_); + ParseStringToStream(s, stackStream); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + SizeType length = static_cast(stackStream.Length()) - 1; + const typename TargetEncoding::Ch* const str = stackStream.Pop(); + success = (isKey ? handler.Key(str, length, true) : handler.String(str, length, true)); + } + if (RAPIDJSON_UNLIKELY(!success)) + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, s.Tell()); + } + + // Parse string to an output is + // This function handles the prefix/suffix double quotes, escaping, and optional encoding validation. + template + RAPIDJSON_FORCEINLINE void ParseStringToStream(InputStream& is, OutputStream& os) { +//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN +#define Z16 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 + static const char escape[256] = { + Z16, Z16, 0, 0,'\"', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,'/', + Z16, Z16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,'\\', 0, 0, 0, + 0, 0,'\b', 0, 0, 0,'\f', 0, 0, 0, 0, 0, 0, 0,'\n', 0, + 0, 0,'\r', 0,'\t', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + Z16, Z16, Z16, Z16, Z16, Z16, Z16, Z16 + }; +#undef Z16 +//!@endcond + + for (;;) { + // Scan and copy string before "\\\"" or < 0x20. This is an optional optimzation. 
+ if (!(parseFlags & kParseValidateEncodingFlag)) + ScanCopyUnescapedString(is, os); + + Ch c = is.Peek(); + if (RAPIDJSON_UNLIKELY(c == '\\')) { // Escape + size_t escapeOffset = is.Tell(); // For invalid escaping, report the inital '\\' as error offset + is.Take(); + Ch e = is.Peek(); + if ((sizeof(Ch) == 1 || unsigned(e) < 256) && RAPIDJSON_LIKELY(escape[static_cast(e)])) { + is.Take(); + os.Put(static_cast(escape[static_cast(e)])); + } + else if (RAPIDJSON_LIKELY(e == 'u')) { // Unicode + is.Take(); + unsigned codepoint = ParseHex4(is, escapeOffset); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + if (RAPIDJSON_UNLIKELY(codepoint >= 0xD800 && codepoint <= 0xDBFF)) { + // Handle UTF-16 surrogate pair + if (RAPIDJSON_UNLIKELY(!Consume(is, '\\') || !Consume(is, 'u'))) + RAPIDJSON_PARSE_ERROR(kParseErrorStringUnicodeSurrogateInvalid, escapeOffset); + unsigned codepoint2 = ParseHex4(is, escapeOffset); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; + if (RAPIDJSON_UNLIKELY(codepoint2 < 0xDC00 || codepoint2 > 0xDFFF)) + RAPIDJSON_PARSE_ERROR(kParseErrorStringUnicodeSurrogateInvalid, escapeOffset); + codepoint = (((codepoint - 0xD800) << 10) | (codepoint2 - 0xDC00)) + 0x10000; + } + TEncoding::Encode(os, codepoint); + } + else + RAPIDJSON_PARSE_ERROR(kParseErrorStringEscapeInvalid, escapeOffset); + } + else if (RAPIDJSON_UNLIKELY(c == '"')) { // Closing double quote + is.Take(); + os.Put('\0'); // null-terminate the string + return; + } + else if (RAPIDJSON_UNLIKELY(static_cast(c) < 0x20)) { // RFC 4627: unescaped = %x20-21 / %x23-5B / %x5D-10FFFF + if (c == '\0') + RAPIDJSON_PARSE_ERROR(kParseErrorStringMissQuotationMark, is.Tell()); + else + RAPIDJSON_PARSE_ERROR(kParseErrorStringEscapeInvalid, is.Tell()); + } + else { + size_t offset = is.Tell(); + if (RAPIDJSON_UNLIKELY((parseFlags & kParseValidateEncodingFlag ? 
+ !Transcoder::Validate(is, os) : + !Transcoder::Transcode(is, os)))) + RAPIDJSON_PARSE_ERROR(kParseErrorStringInvalidEncoding, offset); + } + } + } + + template + static RAPIDJSON_FORCEINLINE void ScanCopyUnescapedString(InputStream&, OutputStream&) { + // Do nothing for generic version + } + +#if defined(RAPIDJSON_SSE2) || defined(RAPIDJSON_SSE42) + // StringStream -> StackStream + static RAPIDJSON_FORCEINLINE void ScanCopyUnescapedString(StringStream& is, StackStream& os) { + const char* p = is.src_; + + // Scan one by one until alignment (unaligned load may cross page boundary and cause crash) + const char* nextAligned = reinterpret_cast((reinterpret_cast(p) + 15) & static_cast(~15)); + while (p != nextAligned) + if (RAPIDJSON_UNLIKELY(*p == '\"') || RAPIDJSON_UNLIKELY(*p == '\\') || RAPIDJSON_UNLIKELY(static_cast(*p) < 0x20)) { + is.src_ = p; + return; + } + else + os.Put(*p++); + + // The rest of string using SIMD + static const char dquote[16] = { '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"' }; + static const char bslash[16] = { '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\' }; + static const char space[16] = { 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19 }; + const __m128i dq = _mm_loadu_si128(reinterpret_cast(&dquote[0])); + const __m128i bs = _mm_loadu_si128(reinterpret_cast(&bslash[0])); + const __m128i sp = _mm_loadu_si128(reinterpret_cast(&space[0])); + + for (;; p += 16) { + const __m128i s = _mm_load_si128(reinterpret_cast(p)); + const __m128i t1 = _mm_cmpeq_epi8(s, dq); + const __m128i t2 = _mm_cmpeq_epi8(s, bs); + const __m128i t3 = _mm_cmpeq_epi8(_mm_max_epu8(s, sp), sp); // s < 0x20 <=> max(s, 0x19) == 0x19 + const __m128i x = _mm_or_si128(_mm_or_si128(t1, t2), t3); + unsigned short r = static_cast(_mm_movemask_epi8(x)); + if (RAPIDJSON_UNLIKELY(r != 0)) { // some of characters is escaped + SizeType length; + #ifdef _MSC_VER // Find the index of first escaped + unsigned long offset; + _BitScanForward(&offset, r); + length = offset; + #else + length = static_cast(__builtin_ffs(r) - 1); + #endif + char* q = reinterpret_cast(os.Push(length)); + for (size_t i = 0; i < length; i++) + q[i] = p[i]; + + p += length; + break; + } + _mm_storeu_si128(reinterpret_cast<__m128i *>(os.Push(16)), s); + } + + is.src_ = p; + } + + // InsituStringStream -> InsituStringStream + static RAPIDJSON_FORCEINLINE void ScanCopyUnescapedString(InsituStringStream& is, InsituStringStream& os) { + RAPIDJSON_ASSERT(&is == &os); + (void)os; + + if (is.src_ == is.dst_) { + SkipUnescapedString(is); + return; + } + + char* p = is.src_; + char *q = is.dst_; + + // Scan one by one until alignment (unaligned load may cross page boundary and cause crash) + const char* nextAligned = reinterpret_cast((reinterpret_cast(p) + 15) & static_cast(~15)); + while (p != nextAligned) + if (RAPIDJSON_UNLIKELY(*p == '\"') || RAPIDJSON_UNLIKELY(*p == '\\') || RAPIDJSON_UNLIKELY(static_cast(*p) < 0x20)) { + is.src_ = p; + is.dst_ = q; + return; + } + else + *q++ = *p++; + + // The rest of string using SIMD + static const char dquote[16] = { '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"' }; + static const char bslash[16] = { '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\' }; + static const char space[16] = { 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 
0x19, 0x19, 0x19 }; + const __m128i dq = _mm_loadu_si128(reinterpret_cast(&dquote[0])); + const __m128i bs = _mm_loadu_si128(reinterpret_cast(&bslash[0])); + const __m128i sp = _mm_loadu_si128(reinterpret_cast(&space[0])); + + for (;; p += 16, q += 16) { + const __m128i s = _mm_load_si128(reinterpret_cast(p)); + const __m128i t1 = _mm_cmpeq_epi8(s, dq); + const __m128i t2 = _mm_cmpeq_epi8(s, bs); + const __m128i t3 = _mm_cmpeq_epi8(_mm_max_epu8(s, sp), sp); // s < 0x20 <=> max(s, 0x19) == 0x19 + const __m128i x = _mm_or_si128(_mm_or_si128(t1, t2), t3); + unsigned short r = static_cast(_mm_movemask_epi8(x)); + if (RAPIDJSON_UNLIKELY(r != 0)) { // some of characters is escaped + size_t length; +#ifdef _MSC_VER // Find the index of first escaped + unsigned long offset; + _BitScanForward(&offset, r); + length = offset; +#else + length = static_cast(__builtin_ffs(r) - 1); +#endif + for (const char* pend = p + length; p != pend; ) + *q++ = *p++; + break; + } + _mm_storeu_si128(reinterpret_cast<__m128i *>(q), s); + } + + is.src_ = p; + is.dst_ = q; + } + + // When read/write pointers are the same for insitu stream, just skip unescaped characters + static RAPIDJSON_FORCEINLINE void SkipUnescapedString(InsituStringStream& is) { + RAPIDJSON_ASSERT(is.src_ == is.dst_); + char* p = is.src_; + + // Scan one by one until alignment (unaligned load may cross page boundary and cause crash) + const char* nextAligned = reinterpret_cast((reinterpret_cast(p) + 15) & static_cast(~15)); + for (; p != nextAligned; p++) + if (RAPIDJSON_UNLIKELY(*p == '\"') || RAPIDJSON_UNLIKELY(*p == '\\') || RAPIDJSON_UNLIKELY(static_cast(*p) < 0x20)) { + is.src_ = is.dst_ = p; + return; + } + + // The rest of string using SIMD + static const char dquote[16] = { '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"' }; + static const char bslash[16] = { '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\' }; + static const char space[16] = { 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19 }; + const __m128i dq = _mm_loadu_si128(reinterpret_cast(&dquote[0])); + const __m128i bs = _mm_loadu_si128(reinterpret_cast(&bslash[0])); + const __m128i sp = _mm_loadu_si128(reinterpret_cast(&space[0])); + + for (;; p += 16) { + const __m128i s = _mm_load_si128(reinterpret_cast(p)); + const __m128i t1 = _mm_cmpeq_epi8(s, dq); + const __m128i t2 = _mm_cmpeq_epi8(s, bs); + const __m128i t3 = _mm_cmpeq_epi8(_mm_max_epu8(s, sp), sp); // s < 0x20 <=> max(s, 0x19) == 0x19 + const __m128i x = _mm_or_si128(_mm_or_si128(t1, t2), t3); + unsigned short r = static_cast(_mm_movemask_epi8(x)); + if (RAPIDJSON_UNLIKELY(r != 0)) { // some of characters is escaped + size_t length; +#ifdef _MSC_VER // Find the index of first escaped + unsigned long offset; + _BitScanForward(&offset, r); + length = offset; +#else + length = static_cast(__builtin_ffs(r) - 1); +#endif + p += length; + break; + } + } + + is.src_ = is.dst_ = p; + } +#endif + + template + class NumberStream; + + template + class NumberStream { + public: + typedef typename InputStream::Ch Ch; + + NumberStream(GenericReader& reader, InputStream& s) : is(s) { (void)reader; } + ~NumberStream() {} + + RAPIDJSON_FORCEINLINE Ch Peek() const { return is.Peek(); } + RAPIDJSON_FORCEINLINE Ch TakePush() { return is.Take(); } + RAPIDJSON_FORCEINLINE Ch Take() { return is.Take(); } + RAPIDJSON_FORCEINLINE void Push(char) {} + + size_t Tell() { return is.Tell(); } + size_t Length() { 
return 0; } + const char* Pop() { return 0; } + + protected: + NumberStream& operator=(const NumberStream&); + + InputStream& is; + }; + + template + class NumberStream : public NumberStream { + typedef NumberStream Base; + public: + NumberStream(GenericReader& reader, InputStream& is) : Base(reader, is), stackStream(reader.stack_) {} + ~NumberStream() {} + + RAPIDJSON_FORCEINLINE Ch TakePush() { + stackStream.Put(static_cast(Base::is.Peek())); + return Base::is.Take(); + } + + RAPIDJSON_FORCEINLINE void Push(char c) { + stackStream.Put(c); + } + + size_t Length() { return stackStream.Length(); } + + const char* Pop() { + stackStream.Put('\0'); + return stackStream.Pop(); + } + + private: + StackStream stackStream; + }; + + template + class NumberStream : public NumberStream { + typedef NumberStream Base; + public: + NumberStream(GenericReader& reader, InputStream& is) : Base(reader, is) {} + ~NumberStream() {} + + RAPIDJSON_FORCEINLINE Ch Take() { return Base::TakePush(); } + }; + + template + void ParseNumber(InputStream& is, Handler& handler) { + internal::StreamLocalCopy copy(is); + NumberStream s(*this, copy.s); + + size_t startOffset = s.Tell(); + double d = 0.0; + bool useNanOrInf = false; + + // Parse minus + bool minus = Consume(s, '-'); + + // Parse int: zero / ( digit1-9 *DIGIT ) + unsigned i = 0; + uint64_t i64 = 0; + bool use64bit = false; + int significandDigit = 0; + if (RAPIDJSON_UNLIKELY(s.Peek() == '0')) { + i = 0; + s.TakePush(); + } + else if (RAPIDJSON_LIKELY(s.Peek() >= '1' && s.Peek() <= '9')) { + i = static_cast(s.TakePush() - '0'); + + if (minus) + while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) { + if (RAPIDJSON_UNLIKELY(i >= 214748364)) { // 2^31 = 2147483648 + if (RAPIDJSON_LIKELY(i != 214748364 || s.Peek() > '8')) { + i64 = i; + use64bit = true; + break; + } + } + i = i * 10 + static_cast(s.TakePush() - '0'); + significandDigit++; + } + else + while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) { + if (RAPIDJSON_UNLIKELY(i >= 429496729)) { // 2^32 - 1 = 4294967295 + if (RAPIDJSON_LIKELY(i != 429496729 || s.Peek() > '5')) { + i64 = i; + use64bit = true; + break; + } + } + i = i * 10 + static_cast(s.TakePush() - '0'); + significandDigit++; + } + } + // Parse NaN or Infinity here + else if ((parseFlags & kParseNanAndInfFlag) && RAPIDJSON_LIKELY((s.Peek() == 'I' || s.Peek() == 'N'))) { + useNanOrInf = true; + if (RAPIDJSON_LIKELY(Consume(s, 'N') && Consume(s, 'a') && Consume(s, 'N'))) { + d = std::numeric_limits::quiet_NaN(); + } + else if (RAPIDJSON_LIKELY(Consume(s, 'I') && Consume(s, 'n') && Consume(s, 'f'))) { + d = (minus ? 
-std::numeric_limits::infinity() : std::numeric_limits::infinity()); + if (RAPIDJSON_UNLIKELY(s.Peek() == 'i' && !(Consume(s, 'i') && Consume(s, 'n') + && Consume(s, 'i') && Consume(s, 't') && Consume(s, 'y')))) + RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, s.Tell()); + } + else + RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, s.Tell()); + } + else + RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, s.Tell()); + + // Parse 64bit int + bool useDouble = false; + if (use64bit) { + if (minus) + while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) { + if (RAPIDJSON_UNLIKELY(i64 >= RAPIDJSON_UINT64_C2(0x0CCCCCCC, 0xCCCCCCCC))) // 2^63 = 9223372036854775808 + if (RAPIDJSON_LIKELY(i64 != RAPIDJSON_UINT64_C2(0x0CCCCCCC, 0xCCCCCCCC) || s.Peek() > '8')) { + d = static_cast(i64); + useDouble = true; + break; + } + i64 = i64 * 10 + static_cast(s.TakePush() - '0'); + significandDigit++; + } + else + while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) { + if (RAPIDJSON_UNLIKELY(i64 >= RAPIDJSON_UINT64_C2(0x19999999, 0x99999999))) // 2^64 - 1 = 18446744073709551615 + if (RAPIDJSON_LIKELY(i64 != RAPIDJSON_UINT64_C2(0x19999999, 0x99999999) || s.Peek() > '5')) { + d = static_cast(i64); + useDouble = true; + break; + } + i64 = i64 * 10 + static_cast(s.TakePush() - '0'); + significandDigit++; + } + } + + // Force double for big integer + if (useDouble) { + while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) { + if (RAPIDJSON_UNLIKELY(d >= 1.7976931348623157e307)) // DBL_MAX / 10.0 + RAPIDJSON_PARSE_ERROR(kParseErrorNumberTooBig, startOffset); + d = d * 10 + (s.TakePush() - '0'); + } + } + + // Parse frac = decimal-point 1*DIGIT + int expFrac = 0; + size_t decimalPosition; + if (Consume(s, '.')) { + decimalPosition = s.Length(); + + if (RAPIDJSON_UNLIKELY(!(s.Peek() >= '0' && s.Peek() <= '9'))) + RAPIDJSON_PARSE_ERROR(kParseErrorNumberMissFraction, s.Tell()); + + if (!useDouble) { +#if RAPIDJSON_64BIT + // Use i64 to store significand in 64-bit architecture + if (!use64bit) + i64 = i; + + while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) { + if (i64 > RAPIDJSON_UINT64_C2(0x1FFFFF, 0xFFFFFFFF)) // 2^53 - 1 for fast path + break; + else { + i64 = i64 * 10 + static_cast(s.TakePush() - '0'); + --expFrac; + if (i64 != 0) + significandDigit++; + } + } + + d = static_cast(i64); +#else + // Use double to store significand in 32-bit architecture + d = static_cast(use64bit ? i64 : i); +#endif + useDouble = true; + } + + while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) { + if (significandDigit < 17) { + d = d * 10.0 + (s.TakePush() - '0'); + --expFrac; + if (RAPIDJSON_LIKELY(d > 0.0)) + significandDigit++; + } + else + s.TakePush(); + } + } + else + decimalPosition = s.Length(); // decimal position at the end of integer. + + // Parse exp = e [ minus / plus ] 1*DIGIT + int exp = 0; + if (Consume(s, 'e') || Consume(s, 'E')) { + if (!useDouble) { + d = static_cast(use64bit ? 
i64 : i); + useDouble = true; + } + + bool expMinus = false; + if (Consume(s, '+')) + ; + else if (Consume(s, '-')) + expMinus = true; + + if (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) { + exp = static_cast(s.Take() - '0'); + if (expMinus) { + while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) { + exp = exp * 10 + static_cast(s.Take() - '0'); + if (exp >= 214748364) { // Issue #313: prevent overflow exponent + while (RAPIDJSON_UNLIKELY(s.Peek() >= '0' && s.Peek() <= '9')) // Consume the rest of exponent + s.Take(); + } + } + } + else { // positive exp + int maxExp = 308 - expFrac; + while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) { + exp = exp * 10 + static_cast(s.Take() - '0'); + if (RAPIDJSON_UNLIKELY(exp > maxExp)) + RAPIDJSON_PARSE_ERROR(kParseErrorNumberTooBig, startOffset); + } + } + } + else + RAPIDJSON_PARSE_ERROR(kParseErrorNumberMissExponent, s.Tell()); + + if (expMinus) + exp = -exp; + } + + // Finish parsing, call event according to the type of number. + bool cont = true; + + if (parseFlags & kParseNumbersAsStringsFlag) { + if (parseFlags & kParseInsituFlag) { + s.Pop(); // Pop stack no matter if it will be used or not. + typename InputStream::Ch* head = is.PutBegin(); + const size_t length = s.Tell() - startOffset; + RAPIDJSON_ASSERT(length <= 0xFFFFFFFF); + // unable to insert the \0 character here, it will erase the comma after this number + const typename TargetEncoding::Ch* const str = reinterpret_cast(head); + cont = handler.RawNumber(str, SizeType(length), false); + } + else { + SizeType numCharsToCopy = static_cast(s.Length()); + StringStream srcStream(s.Pop()); + StackStream dstStream(stack_); + while (numCharsToCopy--) { + Transcoder, TargetEncoding>::Transcode(srcStream, dstStream); + } + dstStream.Put('\0'); + const typename TargetEncoding::Ch* str = dstStream.Pop(); + const SizeType length = static_cast(dstStream.Length()) - 1; + cont = handler.RawNumber(str, SizeType(length), true); + } + } + else { + size_t length = s.Length(); + const char* decimal = s.Pop(); // Pop stack no matter if it will be used or not. + + if (useDouble) { + int p = exp + expFrac; + if (parseFlags & kParseFullPrecisionFlag) + d = internal::StrtodFullPrecision(d, p, decimal, length, decimalPosition, exp); + else + d = internal::StrtodNormalPrecision(d, p); + + cont = handler.Double(minus ? 
-d : d); + } + else if (useNanOrInf) { + cont = handler.Double(d); + } + else { + if (use64bit) { + if (minus) + cont = handler.Int64(static_cast(~i64 + 1)); + else + cont = handler.Uint64(i64); + } + else { + if (minus) + cont = handler.Int(static_cast(~i + 1)); + else + cont = handler.Uint(i); + } + } + } + if (RAPIDJSON_UNLIKELY(!cont)) + RAPIDJSON_PARSE_ERROR(kParseErrorTermination, startOffset); + } + + // Parse any JSON value + template + void ParseValue(InputStream& is, Handler& handler) { + switch (is.Peek()) { + case 'n': ParseNull (is, handler); break; + case 't': ParseTrue (is, handler); break; + case 'f': ParseFalse (is, handler); break; + case '"': ParseString(is, handler); break; + case '{': ParseObject(is, handler); break; + case '[': ParseArray (is, handler); break; + default : + ParseNumber(is, handler); + break; + + } + } + + // Iterative Parsing + + // States + enum IterativeParsingState { + IterativeParsingStartState = 0, + IterativeParsingFinishState, + IterativeParsingErrorState, + + // Object states + IterativeParsingObjectInitialState, + IterativeParsingMemberKeyState, + IterativeParsingKeyValueDelimiterState, + IterativeParsingMemberValueState, + IterativeParsingMemberDelimiterState, + IterativeParsingObjectFinishState, + + // Array states + IterativeParsingArrayInitialState, + IterativeParsingElementState, + IterativeParsingElementDelimiterState, + IterativeParsingArrayFinishState, + + // Single value state + IterativeParsingValueState + }; + + enum { cIterativeParsingStateCount = IterativeParsingValueState + 1 }; + + // Tokens + enum Token { + LeftBracketToken = 0, + RightBracketToken, + + LeftCurlyBracketToken, + RightCurlyBracketToken, + + CommaToken, + ColonToken, + + StringToken, + FalseToken, + TrueToken, + NullToken, + NumberToken, + + kTokenCount + }; + + RAPIDJSON_FORCEINLINE Token Tokenize(Ch c) { + +//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN +#define N NumberToken +#define N16 N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N + // Maps from ASCII to Token + static const unsigned char tokenMap[256] = { + N16, // 00~0F + N16, // 10~1F + N, N, StringToken, N, N, N, N, N, N, N, N, N, CommaToken, N, N, N, // 20~2F + N, N, N, N, N, N, N, N, N, N, ColonToken, N, N, N, N, N, // 30~3F + N16, // 40~4F + N, N, N, N, N, N, N, N, N, N, N, LeftBracketToken, N, RightBracketToken, N, N, // 50~5F + N, N, N, N, N, N, FalseToken, N, N, N, N, N, N, N, NullToken, N, // 60~6F + N, N, N, N, TrueToken, N, N, N, N, N, N, LeftCurlyBracketToken, N, RightCurlyBracketToken, N, N, // 70~7F + N16, N16, N16, N16, N16, N16, N16, N16 // 80~FF + }; +#undef N +#undef N16 +//!@endcond + + if (sizeof(Ch) == 1 || static_cast(c) < 256) + return static_cast(tokenMap[static_cast(c)]); + else + return NumberToken; + } + + RAPIDJSON_FORCEINLINE IterativeParsingState Predict(IterativeParsingState state, Token token) { + // current state x one lookahead token -> new state + static const char G[cIterativeParsingStateCount][kTokenCount] = { + // Start + { + IterativeParsingArrayInitialState, // Left bracket + IterativeParsingErrorState, // Right bracket + IterativeParsingObjectInitialState, // Left curly bracket + IterativeParsingErrorState, // Right curly bracket + IterativeParsingErrorState, // Comma + IterativeParsingErrorState, // Colon + IterativeParsingValueState, // String + IterativeParsingValueState, // False + IterativeParsingValueState, // True + IterativeParsingValueState, // Null + IterativeParsingValueState // Number + }, + // Finish(sink state) + { + IterativeParsingErrorState, IterativeParsingErrorState, 
IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, + IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, + IterativeParsingErrorState + }, + // Error(sink state) + { + IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, + IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, + IterativeParsingErrorState + }, + // ObjectInitial + { + IterativeParsingErrorState, // Left bracket + IterativeParsingErrorState, // Right bracket + IterativeParsingErrorState, // Left curly bracket + IterativeParsingObjectFinishState, // Right curly bracket + IterativeParsingErrorState, // Comma + IterativeParsingErrorState, // Colon + IterativeParsingMemberKeyState, // String + IterativeParsingErrorState, // False + IterativeParsingErrorState, // True + IterativeParsingErrorState, // Null + IterativeParsingErrorState // Number + }, + // MemberKey + { + IterativeParsingErrorState, // Left bracket + IterativeParsingErrorState, // Right bracket + IterativeParsingErrorState, // Left curly bracket + IterativeParsingErrorState, // Right curly bracket + IterativeParsingErrorState, // Comma + IterativeParsingKeyValueDelimiterState, // Colon + IterativeParsingErrorState, // String + IterativeParsingErrorState, // False + IterativeParsingErrorState, // True + IterativeParsingErrorState, // Null + IterativeParsingErrorState // Number + }, + // KeyValueDelimiter + { + IterativeParsingArrayInitialState, // Left bracket(push MemberValue state) + IterativeParsingErrorState, // Right bracket + IterativeParsingObjectInitialState, // Left curly bracket(push MemberValue state) + IterativeParsingErrorState, // Right curly bracket + IterativeParsingErrorState, // Comma + IterativeParsingErrorState, // Colon + IterativeParsingMemberValueState, // String + IterativeParsingMemberValueState, // False + IterativeParsingMemberValueState, // True + IterativeParsingMemberValueState, // Null + IterativeParsingMemberValueState // Number + }, + // MemberValue + { + IterativeParsingErrorState, // Left bracket + IterativeParsingErrorState, // Right bracket + IterativeParsingErrorState, // Left curly bracket + IterativeParsingObjectFinishState, // Right curly bracket + IterativeParsingMemberDelimiterState, // Comma + IterativeParsingErrorState, // Colon + IterativeParsingErrorState, // String + IterativeParsingErrorState, // False + IterativeParsingErrorState, // True + IterativeParsingErrorState, // Null + IterativeParsingErrorState // Number + }, + // MemberDelimiter + { + IterativeParsingErrorState, // Left bracket + IterativeParsingErrorState, // Right bracket + IterativeParsingErrorState, // Left curly bracket + IterativeParsingObjectFinishState, // Right curly bracket + IterativeParsingErrorState, // Comma + IterativeParsingErrorState, // Colon + IterativeParsingMemberKeyState, // String + IterativeParsingErrorState, // False + IterativeParsingErrorState, // True + IterativeParsingErrorState, // Null + IterativeParsingErrorState // Number + }, + // ObjectFinish(sink state) + { + IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, + IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, 
IterativeParsingErrorState, + IterativeParsingErrorState + }, + // ArrayInitial + { + IterativeParsingArrayInitialState, // Left bracket(push Element state) + IterativeParsingArrayFinishState, // Right bracket + IterativeParsingObjectInitialState, // Left curly bracket(push Element state) + IterativeParsingErrorState, // Right curly bracket + IterativeParsingErrorState, // Comma + IterativeParsingErrorState, // Colon + IterativeParsingElementState, // String + IterativeParsingElementState, // False + IterativeParsingElementState, // True + IterativeParsingElementState, // Null + IterativeParsingElementState // Number + }, + // Element + { + IterativeParsingErrorState, // Left bracket + IterativeParsingArrayFinishState, // Right bracket + IterativeParsingErrorState, // Left curly bracket + IterativeParsingErrorState, // Right curly bracket + IterativeParsingElementDelimiterState, // Comma + IterativeParsingErrorState, // Colon + IterativeParsingErrorState, // String + IterativeParsingErrorState, // False + IterativeParsingErrorState, // True + IterativeParsingErrorState, // Null + IterativeParsingErrorState // Number + }, + // ElementDelimiter + { + IterativeParsingArrayInitialState, // Left bracket(push Element state) + IterativeParsingArrayFinishState, // Right bracket + IterativeParsingObjectInitialState, // Left curly bracket(push Element state) + IterativeParsingErrorState, // Right curly bracket + IterativeParsingErrorState, // Comma + IterativeParsingErrorState, // Colon + IterativeParsingElementState, // String + IterativeParsingElementState, // False + IterativeParsingElementState, // True + IterativeParsingElementState, // Null + IterativeParsingElementState // Number + }, + // ArrayFinish(sink state) + { + IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, + IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, + IterativeParsingErrorState + }, + // Single Value (sink state) + { + IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, + IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, + IterativeParsingErrorState + } + }; // End of G + + return static_cast(G[state][token]); + } + + // Make an advance in the token stream and state based on the candidate destination state which was returned by Transit(). + // May return a new state on state pop. + template + RAPIDJSON_FORCEINLINE IterativeParsingState Transit(IterativeParsingState src, Token token, IterativeParsingState dst, InputStream& is, Handler& handler) { + (void)token; + + switch (dst) { + case IterativeParsingErrorState: + return dst; + + case IterativeParsingObjectInitialState: + case IterativeParsingArrayInitialState: + { + // Push the state(Element or MemeberValue) if we are nested in another array or value of member. + // In this way we can get the correct state on ObjectFinish or ArrayFinish by frame pop. + IterativeParsingState n = src; + if (src == IterativeParsingArrayInitialState || src == IterativeParsingElementDelimiterState) + n = IterativeParsingElementState; + else if (src == IterativeParsingKeyValueDelimiterState) + n = IterativeParsingMemberValueState; + // Push current state. + *stack_.template Push(1) = n; + // Initialize and push the member/element count. 
+ *stack_.template Push(1) = 0; + // Call handler + bool hr = (dst == IterativeParsingObjectInitialState) ? handler.StartObject() : handler.StartArray(); + // On handler short circuits the parsing. + if (!hr) { + RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorTermination, is.Tell()); + return IterativeParsingErrorState; + } + else { + is.Take(); + return dst; + } + } + + case IterativeParsingMemberKeyState: + ParseString(is, handler, true); + if (HasParseError()) + return IterativeParsingErrorState; + else + return dst; + + case IterativeParsingKeyValueDelimiterState: + RAPIDJSON_ASSERT(token == ColonToken); + is.Take(); + return dst; + + case IterativeParsingMemberValueState: + // Must be non-compound value. Or it would be ObjectInitial or ArrayInitial state. + ParseValue(is, handler); + if (HasParseError()) { + return IterativeParsingErrorState; + } + return dst; + + case IterativeParsingElementState: + // Must be non-compound value. Or it would be ObjectInitial or ArrayInitial state. + ParseValue(is, handler); + if (HasParseError()) { + return IterativeParsingErrorState; + } + return dst; + + case IterativeParsingMemberDelimiterState: + case IterativeParsingElementDelimiterState: + is.Take(); + // Update member/element count. + *stack_.template Top() = *stack_.template Top() + 1; + return dst; + + case IterativeParsingObjectFinishState: + { + // Transit from delimiter is only allowed when trailing commas are enabled + if (!(parseFlags & kParseTrailingCommasFlag) && src == IterativeParsingMemberDelimiterState) { + RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorObjectMissName, is.Tell()); + return IterativeParsingErrorState; + } + // Get member count. + SizeType c = *stack_.template Pop(1); + // If the object is not empty, count the last member. + if (src == IterativeParsingMemberValueState) + ++c; + // Restore the state. + IterativeParsingState n = static_cast(*stack_.template Pop(1)); + // Transit to Finish state if this is the topmost scope. + if (n == IterativeParsingStartState) + n = IterativeParsingFinishState; + // Call handler + bool hr = handler.EndObject(c); + // On handler short circuits the parsing. + if (!hr) { + RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorTermination, is.Tell()); + return IterativeParsingErrorState; + } + else { + is.Take(); + return n; + } + } + + case IterativeParsingArrayFinishState: + { + // Transit from delimiter is only allowed when trailing commas are enabled + if (!(parseFlags & kParseTrailingCommasFlag) && src == IterativeParsingElementDelimiterState) { + RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorValueInvalid, is.Tell()); + return IterativeParsingErrorState; + } + // Get element count. + SizeType c = *stack_.template Pop(1); + // If the array is not empty, count the last element. + if (src == IterativeParsingElementState) + ++c; + // Restore the state. + IterativeParsingState n = static_cast(*stack_.template Pop(1)); + // Transit to Finish state if this is the topmost scope. + if (n == IterativeParsingStartState) + n = IterativeParsingFinishState; + // Call handler + bool hr = handler.EndArray(c); + // On handler short circuits the parsing. + if (!hr) { + RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorTermination, is.Tell()); + return IterativeParsingErrorState; + } + else { + is.Take(); + return n; + } + } + + default: + // This branch is for IterativeParsingValueState actually. + // Use `default:` rather than + // `case IterativeParsingValueState:` is for code coverage. + + // The IterativeParsingStartState is not enumerated in this switch-case. 
+ // It is impossible for that case. And it can be caught by following assertion. + + // The IterativeParsingFinishState is not enumerated in this switch-case either. + // It is a "derivative" state which cannot triggered from Predict() directly. + // Therefore it cannot happen here. And it can be caught by following assertion. + RAPIDJSON_ASSERT(dst == IterativeParsingValueState); + + // Must be non-compound value. Or it would be ObjectInitial or ArrayInitial state. + ParseValue(is, handler); + if (HasParseError()) { + return IterativeParsingErrorState; + } + return IterativeParsingFinishState; + } + } + + template + void HandleError(IterativeParsingState src, InputStream& is) { + if (HasParseError()) { + // Error flag has been set. + return; + } + + switch (src) { + case IterativeParsingStartState: RAPIDJSON_PARSE_ERROR(kParseErrorDocumentEmpty, is.Tell()); return; + case IterativeParsingFinishState: RAPIDJSON_PARSE_ERROR(kParseErrorDocumentRootNotSingular, is.Tell()); return; + case IterativeParsingObjectInitialState: + case IterativeParsingMemberDelimiterState: RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissName, is.Tell()); return; + case IterativeParsingMemberKeyState: RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissColon, is.Tell()); return; + case IterativeParsingMemberValueState: RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissCommaOrCurlyBracket, is.Tell()); return; + case IterativeParsingKeyValueDelimiterState: + case IterativeParsingArrayInitialState: + case IterativeParsingElementDelimiterState: RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, is.Tell()); return; + default: RAPIDJSON_ASSERT(src == IterativeParsingElementState); RAPIDJSON_PARSE_ERROR(kParseErrorArrayMissCommaOrSquareBracket, is.Tell()); return; + } + } + + template + ParseResult IterativeParse(InputStream& is, Handler& handler) { + parseResult_.Clear(); + ClearStackOnExit scope(*this); + IterativeParsingState state = IterativeParsingStartState; + + SkipWhitespaceAndComments(is); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_); + while (is.Peek() != '\0') { + Token t = Tokenize(is.Peek()); + IterativeParsingState n = Predict(state, t); + IterativeParsingState d = Transit(state, t, n, is, handler); + + if (d == IterativeParsingErrorState) { + HandleError(state, is); + break; + } + + state = d; + + // Do not further consume streams if a root JSON has been parsed. + if ((parseFlags & kParseStopWhenDoneFlag) && state == IterativeParsingFinishState) + break; + + SkipWhitespaceAndComments(is); + RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_); + } + + // Handle the end of file. + if (state != IterativeParsingFinishState) + HandleError(state, is); + + return parseResult_; + } + + static const size_t kDefaultStackCapacity = 256; //!< Default stack capacity in bytes for storing a single decoded string. + internal::Stack stack_; //!< A stack for storing decoded string temporarily during non-destructive parsing. + ParseResult parseResult_; +}; // class GenericReader + +//! Reader with UTF8 encoding and default allocator. 
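A usage sketch for the iterative parser defined above: with kParseIterativeFlag the nesting bookkeeping lives on the reader's own stack rather than the call stack, so deeply nested input does not risk a call-stack overflow. The depth and handler are illustrative.

#include "rapidjson/reader.h"
#include <string>
#include <cstdio>
using namespace rapidjson;

int main() {
    const size_t depth = 100000;               // deep enough to endanger the call stack of a recursive parse
    std::string json(depth, '[');
    json.append(depth, ']');

    BaseReaderHandler<> handler;               // accepts every event
    Reader reader;
    StringStream ss(json.c_str());
    ParseResult ok = reader.Parse<kParseIterativeFlag>(ss, handler);
    std::printf("%s\n", ok ? "parsed" : "failed");
    return 0;
}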
+typedef GenericReader, UTF8<> > Reader; + +RAPIDJSON_NAMESPACE_END + +#ifdef __clang__ +RAPIDJSON_DIAG_POP +#endif + + +#ifdef __GNUC__ +RAPIDJSON_DIAG_POP +#endif + +#ifdef _MSC_VER +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_READER_H_ diff --git a/include/rapidjson/schema.h b/include/rapidjson/schema.h new file mode 100644 index 0000000..b182aa2 --- /dev/null +++ b/include/rapidjson/schema.h @@ -0,0 +1,2006 @@ +// Tencent is pleased to support the open source community by making RapidJSON available-> +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip-> All rights reserved-> +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License-> You may obtain a copy of the License at +// +// http://opensource->org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied-> See the License for the +// specific language governing permissions and limitations under the License-> + +#ifndef RAPIDJSON_SCHEMA_H_ +#define RAPIDJSON_SCHEMA_H_ + +#include "document.h" +#include "pointer.h" +#include // abs, floor + +#if !defined(RAPIDJSON_SCHEMA_USE_INTERNALREGEX) +#define RAPIDJSON_SCHEMA_USE_INTERNALREGEX 1 +#else +#define RAPIDJSON_SCHEMA_USE_INTERNALREGEX 0 +#endif + +#if !RAPIDJSON_SCHEMA_USE_INTERNALREGEX && !defined(RAPIDJSON_SCHEMA_USE_STDREGEX) && (__cplusplus >=201103L || (defined(_MSC_VER) && _MSC_VER >= 1800)) +#define RAPIDJSON_SCHEMA_USE_STDREGEX 1 +#else +#define RAPIDJSON_SCHEMA_USE_STDREGEX 0 +#endif + +#if RAPIDJSON_SCHEMA_USE_INTERNALREGEX +#include "internal/regex.h" +#elif RAPIDJSON_SCHEMA_USE_STDREGEX +#include +#endif + +#if RAPIDJSON_SCHEMA_USE_INTERNALREGEX || RAPIDJSON_SCHEMA_USE_STDREGEX +#define RAPIDJSON_SCHEMA_HAS_REGEX 1 +#else +#define RAPIDJSON_SCHEMA_HAS_REGEX 0 +#endif + +#ifndef RAPIDJSON_SCHEMA_VERBOSE +#define RAPIDJSON_SCHEMA_VERBOSE 0 +#endif + +#if RAPIDJSON_SCHEMA_VERBOSE +#include "stringbuffer.h" +#endif + +RAPIDJSON_DIAG_PUSH + +#if defined(__GNUC__) +RAPIDJSON_DIAG_OFF(effc++) +#endif + +#ifdef __clang__ +RAPIDJSON_DIAG_OFF(weak-vtables) +RAPIDJSON_DIAG_OFF(exit-time-destructors) +RAPIDJSON_DIAG_OFF(c++98-compat-pedantic) +RAPIDJSON_DIAG_OFF(variadic-macros) +#endif + +#ifdef _MSC_VER +RAPIDJSON_DIAG_OFF(4512) // assignment operator could not be generated +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +/////////////////////////////////////////////////////////////////////////////// +// Verbose Utilities + +#if RAPIDJSON_SCHEMA_VERBOSE + +namespace internal { + +inline void PrintInvalidKeyword(const char* keyword) { + printf("Fail keyword: %s\n", keyword); +} + +inline void PrintInvalidKeyword(const wchar_t* keyword) { + wprintf(L"Fail keyword: %ls\n", keyword); +} + +inline void PrintInvalidDocument(const char* document) { + printf("Fail document: %s\n\n", document); +} + +inline void PrintInvalidDocument(const wchar_t* document) { + wprintf(L"Fail document: %ls\n\n", document); +} + +inline void PrintValidatorPointers(unsigned depth, const char* s, const char* d) { + printf("S: %*s%s\nD: %*s%s\n\n", depth * 4, " ", s, depth * 4, " ", d); +} + +inline void PrintValidatorPointers(unsigned depth, const wchar_t* s, const wchar_t* d) { + wprintf(L"S: %*ls%ls\nD: %*ls%ls\n\n", depth * 4, L" ", s, depth * 4, L" ", d); +} + +} // namespace internal + +#endif // RAPIDJSON_SCHEMA_VERBOSE + 
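Before the validation machinery below, a typical end-to-end use of this header, sketched under the assumption of the SchemaDocument and SchemaValidator typedefs that schema.h provides further down (not shown in this excerpt); the schema and document texts are illustrative.

#include "rapidjson/document.h"
#include "rapidjson/schema.h"
#include <cstdio>
using namespace rapidjson;

int main() {
    Document sd;
    sd.Parse("{\"type\":\"object\",\"properties\":{\"id\":{\"type\":\"integer\"}},\"required\":[\"id\"]}");
    SchemaDocument schema(sd);                 // compile the schema once, reuse it for many documents

    Document d;
    d.Parse("{\"id\":\"not-a-number\"}");
    SchemaValidator validator(schema);
    if (!d.Accept(validator))                  // the validator is itself a SAX handler
        std::printf("invalid, failing keyword: %s\n", validator.GetInvalidSchemaKeyword());
    return 0;
}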
+/////////////////////////////////////////////////////////////////////////////// +// RAPIDJSON_INVALID_KEYWORD_RETURN + +#if RAPIDJSON_SCHEMA_VERBOSE +#define RAPIDJSON_INVALID_KEYWORD_VERBOSE(keyword) internal::PrintInvalidKeyword(keyword) +#else +#define RAPIDJSON_INVALID_KEYWORD_VERBOSE(keyword) +#endif + +#define RAPIDJSON_INVALID_KEYWORD_RETURN(keyword)\ +RAPIDJSON_MULTILINEMACRO_BEGIN\ + context.invalidKeyword = keyword.GetString();\ + RAPIDJSON_INVALID_KEYWORD_VERBOSE(keyword.GetString());\ + return false;\ +RAPIDJSON_MULTILINEMACRO_END + +/////////////////////////////////////////////////////////////////////////////// +// Forward declarations + +template +class GenericSchemaDocument; + +namespace internal { + +template +class Schema; + +/////////////////////////////////////////////////////////////////////////////// +// ISchemaValidator + +class ISchemaValidator { +public: + virtual ~ISchemaValidator() {} + virtual bool IsValid() const = 0; +}; + +/////////////////////////////////////////////////////////////////////////////// +// ISchemaStateFactory + +template +class ISchemaStateFactory { +public: + virtual ~ISchemaStateFactory() {} + virtual ISchemaValidator* CreateSchemaValidator(const SchemaType&) = 0; + virtual void DestroySchemaValidator(ISchemaValidator* validator) = 0; + virtual void* CreateHasher() = 0; + virtual uint64_t GetHashCode(void* hasher) = 0; + virtual void DestroryHasher(void* hasher) = 0; + virtual void* MallocState(size_t size) = 0; + virtual void FreeState(void* p) = 0; +}; + +/////////////////////////////////////////////////////////////////////////////// +// Hasher + +// For comparison of compound value +template +class Hasher { +public: + typedef typename Encoding::Ch Ch; + + Hasher(Allocator* allocator = 0, size_t stackCapacity = kDefaultSize) : stack_(allocator, stackCapacity) {} + + bool Null() { return WriteType(kNullType); } + bool Bool(bool b) { return WriteType(b ? 
kTrueType : kFalseType); } + bool Int(int i) { Number n; n.u.i = i; n.d = static_cast(i); return WriteNumber(n); } + bool Uint(unsigned u) { Number n; n.u.u = u; n.d = static_cast(u); return WriteNumber(n); } + bool Int64(int64_t i) { Number n; n.u.i = i; n.d = static_cast(i); return WriteNumber(n); } + bool Uint64(uint64_t u) { Number n; n.u.u = u; n.d = static_cast(u); return WriteNumber(n); } + bool Double(double d) { + Number n; + if (d < 0) n.u.i = static_cast(d); + else n.u.u = static_cast(d); + n.d = d; + return WriteNumber(n); + } + + bool RawNumber(const Ch* str, SizeType len, bool) { + WriteBuffer(kNumberType, str, len * sizeof(Ch)); + return true; + } + + bool String(const Ch* str, SizeType len, bool) { + WriteBuffer(kStringType, str, len * sizeof(Ch)); + return true; + } + + bool StartObject() { return true; } + bool Key(const Ch* str, SizeType len, bool copy) { return String(str, len, copy); } + bool EndObject(SizeType memberCount) { + uint64_t h = Hash(0, kObjectType); + uint64_t* kv = stack_.template Pop(memberCount * 2); + for (SizeType i = 0; i < memberCount; i++) + h ^= Hash(kv[i * 2], kv[i * 2 + 1]); // Use xor to achieve member order insensitive + *stack_.template Push() = h; + return true; + } + + bool StartArray() { return true; } + bool EndArray(SizeType elementCount) { + uint64_t h = Hash(0, kArrayType); + uint64_t* e = stack_.template Pop(elementCount); + for (SizeType i = 0; i < elementCount; i++) + h = Hash(h, e[i]); // Use hash to achieve element order sensitive + *stack_.template Push() = h; + return true; + } + + bool IsValid() const { return stack_.GetSize() == sizeof(uint64_t); } + + uint64_t GetHashCode() const { + RAPIDJSON_ASSERT(IsValid()); + return *stack_.template Top(); + } + +private: + static const size_t kDefaultSize = 256; + struct Number { + union U { + uint64_t u; + int64_t i; + }u; + double d; + }; + + bool WriteType(Type type) { return WriteBuffer(type, 0, 0); } + + bool WriteNumber(const Number& n) { return WriteBuffer(kNumberType, &n, sizeof(n)); } + + bool WriteBuffer(Type type, const void* data, size_t len) { + // FNV-1a from http://isthe.com/chongo/tech/comp/fnv/ + uint64_t h = Hash(RAPIDJSON_UINT64_C2(0x84222325, 0xcbf29ce4), type); + const unsigned char* d = static_cast(data); + for (size_t i = 0; i < len; i++) + h = Hash(h, d[i]); + *stack_.template Push() = h; + return true; + } + + static uint64_t Hash(uint64_t h, uint64_t d) { + static const uint64_t kPrime = RAPIDJSON_UINT64_C2(0x00000100, 0x000001b3); + h ^= d; + h *= kPrime; + return h; + } + + Stack stack_; +}; + +/////////////////////////////////////////////////////////////////////////////// +// SchemaValidationContext + +template +struct SchemaValidationContext { + typedef Schema SchemaType; + typedef ISchemaStateFactory SchemaValidatorFactoryType; + typedef typename SchemaType::ValueType ValueType; + typedef typename ValueType::Ch Ch; + + enum PatternValidatorType { + kPatternValidatorOnly, + kPatternValidatorWithProperty, + kPatternValidatorWithAdditionalProperty + }; + + SchemaValidationContext(SchemaValidatorFactoryType& f, const SchemaType* s) : + factory(f), + schema(s), + valueSchema(), + invalidKeyword(), + hasher(), + arrayElementHashCodes(), + validators(), + validatorCount(), + patternPropertiesValidators(), + patternPropertiesValidatorCount(), + patternPropertiesSchemas(), + patternPropertiesSchemaCount(), + valuePatternValidatorType(kPatternValidatorOnly), + propertyExist(), + inArray(false), + valueUniqueness(false), + arrayUniqueness(false) + { + } + + 
~SchemaValidationContext() { + if (hasher) + factory.DestroryHasher(hasher); + if (validators) { + for (SizeType i = 0; i < validatorCount; i++) + factory.DestroySchemaValidator(validators[i]); + factory.FreeState(validators); + } + if (patternPropertiesValidators) { + for (SizeType i = 0; i < patternPropertiesValidatorCount; i++) + factory.DestroySchemaValidator(patternPropertiesValidators[i]); + factory.FreeState(patternPropertiesValidators); + } + if (patternPropertiesSchemas) + factory.FreeState(patternPropertiesSchemas); + if (propertyExist) + factory.FreeState(propertyExist); + } + + SchemaValidatorFactoryType& factory; + const SchemaType* schema; + const SchemaType* valueSchema; + const Ch* invalidKeyword; + void* hasher; // Only validator access + void* arrayElementHashCodes; // Only validator access this + ISchemaValidator** validators; + SizeType validatorCount; + ISchemaValidator** patternPropertiesValidators; + SizeType patternPropertiesValidatorCount; + const SchemaType** patternPropertiesSchemas; + SizeType patternPropertiesSchemaCount; + PatternValidatorType valuePatternValidatorType; + PatternValidatorType objectPatternValidatorType; + SizeType arrayElementIndex; + bool* propertyExist; + bool inArray; + bool valueUniqueness; + bool arrayUniqueness; +}; + +/////////////////////////////////////////////////////////////////////////////// +// Schema + +template +class Schema { +public: + typedef typename SchemaDocumentType::ValueType ValueType; + typedef typename SchemaDocumentType::AllocatorType AllocatorType; + typedef typename SchemaDocumentType::PointerType PointerType; + typedef typename ValueType::EncodingType EncodingType; + typedef typename EncodingType::Ch Ch; + typedef SchemaValidationContext Context; + typedef Schema SchemaType; + typedef GenericValue SValue; + friend class GenericSchemaDocument; + + Schema(SchemaDocumentType* schemaDocument, const PointerType& p, const ValueType& value, const ValueType& document, AllocatorType* allocator) : + allocator_(allocator), + enum_(), + enumCount_(), + not_(), + type_((1 << kTotalSchemaType) - 1), // typeless + validatorCount_(), + properties_(), + additionalPropertiesSchema_(), + patternProperties_(), + patternPropertyCount_(), + propertyCount_(), + minProperties_(), + maxProperties_(SizeType(~0)), + additionalProperties_(true), + hasDependencies_(), + hasRequired_(), + hasSchemaDependencies_(), + additionalItemsSchema_(), + itemsList_(), + itemsTuple_(), + itemsTupleCount_(), + minItems_(), + maxItems_(SizeType(~0)), + additionalItems_(true), + uniqueItems_(false), + pattern_(), + minLength_(0), + maxLength_(~SizeType(0)), + exclusiveMinimum_(false), + exclusiveMaximum_(false) + { + typedef typename SchemaDocumentType::ValueType ValueType; + typedef typename ValueType::ConstValueIterator ConstValueIterator; + typedef typename ValueType::ConstMemberIterator ConstMemberIterator; + + if (!value.IsObject()) + return; + + if (const ValueType* v = GetMember(value, GetTypeString())) { + type_ = 0; + if (v->IsString()) + AddType(*v); + else if (v->IsArray()) + for (ConstValueIterator itr = v->Begin(); itr != v->End(); ++itr) + AddType(*itr); + } + + if (const ValueType* v = GetMember(value, GetEnumString())) + if (v->IsArray() && v->Size() > 0) { + enum_ = static_cast(allocator_->Malloc(sizeof(uint64_t) * v->Size())); + for (ConstValueIterator itr = v->Begin(); itr != v->End(); ++itr) { + typedef Hasher > EnumHasherType; + char buffer[256 + 24]; + MemoryPoolAllocator<> hasherAllocator(buffer, sizeof(buffer)); + EnumHasherType 
h(&hasherAllocator, 256); + itr->Accept(h); + enum_[enumCount_++] = h.GetHashCode(); + } + } + + if (schemaDocument) { + AssignIfExist(allOf_, *schemaDocument, p, value, GetAllOfString(), document); + AssignIfExist(anyOf_, *schemaDocument, p, value, GetAnyOfString(), document); + AssignIfExist(oneOf_, *schemaDocument, p, value, GetOneOfString(), document); + } + + if (const ValueType* v = GetMember(value, GetNotString())) { + schemaDocument->CreateSchema(¬_, p.Append(GetNotString(), allocator_), *v, document); + notValidatorIndex_ = validatorCount_; + validatorCount_++; + } + + // Object + + const ValueType* properties = GetMember(value, GetPropertiesString()); + const ValueType* required = GetMember(value, GetRequiredString()); + const ValueType* dependencies = GetMember(value, GetDependenciesString()); + { + // Gather properties from properties/required/dependencies + SValue allProperties(kArrayType); + + if (properties && properties->IsObject()) + for (ConstMemberIterator itr = properties->MemberBegin(); itr != properties->MemberEnd(); ++itr) + AddUniqueElement(allProperties, itr->name); + + if (required && required->IsArray()) + for (ConstValueIterator itr = required->Begin(); itr != required->End(); ++itr) + if (itr->IsString()) + AddUniqueElement(allProperties, *itr); + + if (dependencies && dependencies->IsObject()) + for (ConstMemberIterator itr = dependencies->MemberBegin(); itr != dependencies->MemberEnd(); ++itr) { + AddUniqueElement(allProperties, itr->name); + if (itr->value.IsArray()) + for (ConstValueIterator i = itr->value.Begin(); i != itr->value.End(); ++i) + if (i->IsString()) + AddUniqueElement(allProperties, *i); + } + + if (allProperties.Size() > 0) { + propertyCount_ = allProperties.Size(); + properties_ = static_cast(allocator_->Malloc(sizeof(Property) * propertyCount_)); + for (SizeType i = 0; i < propertyCount_; i++) { + new (&properties_[i]) Property(); + properties_[i].name = allProperties[i]; + properties_[i].schema = GetTypeless(); + } + } + } + + if (properties && properties->IsObject()) { + PointerType q = p.Append(GetPropertiesString(), allocator_); + for (ConstMemberIterator itr = properties->MemberBegin(); itr != properties->MemberEnd(); ++itr) { + SizeType index; + if (FindPropertyIndex(itr->name, &index)) + schemaDocument->CreateSchema(&properties_[index].schema, q.Append(itr->name, allocator_), itr->value, document); + } + } + + if (const ValueType* v = GetMember(value, GetPatternPropertiesString())) { + PointerType q = p.Append(GetPatternPropertiesString(), allocator_); + patternProperties_ = static_cast(allocator_->Malloc(sizeof(PatternProperty) * v->MemberCount())); + patternPropertyCount_ = 0; + + for (ConstMemberIterator itr = v->MemberBegin(); itr != v->MemberEnd(); ++itr) { + new (&patternProperties_[patternPropertyCount_]) PatternProperty(); + patternProperties_[patternPropertyCount_].pattern = CreatePattern(itr->name); + schemaDocument->CreateSchema(&patternProperties_[patternPropertyCount_].schema, q.Append(itr->name, allocator_), itr->value, document); + patternPropertyCount_++; + } + } + + if (required && required->IsArray()) + for (ConstValueIterator itr = required->Begin(); itr != required->End(); ++itr) + if (itr->IsString()) { + SizeType index; + if (FindPropertyIndex(*itr, &index)) { + properties_[index].required = true; + hasRequired_ = true; + } + } + + if (dependencies && dependencies->IsObject()) { + PointerType q = p.Append(GetDependenciesString(), allocator_); + hasDependencies_ = true; + for (ConstMemberIterator itr = 
dependencies->MemberBegin(); itr != dependencies->MemberEnd(); ++itr) { + SizeType sourceIndex; + if (FindPropertyIndex(itr->name, &sourceIndex)) { + if (itr->value.IsArray()) { + properties_[sourceIndex].dependencies = static_cast(allocator_->Malloc(sizeof(bool) * propertyCount_)); + std::memset(properties_[sourceIndex].dependencies, 0, sizeof(bool)* propertyCount_); + for (ConstValueIterator targetItr = itr->value.Begin(); targetItr != itr->value.End(); ++targetItr) { + SizeType targetIndex; + if (FindPropertyIndex(*targetItr, &targetIndex)) + properties_[sourceIndex].dependencies[targetIndex] = true; + } + } + else if (itr->value.IsObject()) { + hasSchemaDependencies_ = true; + schemaDocument->CreateSchema(&properties_[sourceIndex].dependenciesSchema, q.Append(itr->name, allocator_), itr->value, document); + properties_[sourceIndex].dependenciesValidatorIndex = validatorCount_; + validatorCount_++; + } + } + } + } + + if (const ValueType* v = GetMember(value, GetAdditionalPropertiesString())) { + if (v->IsBool()) + additionalProperties_ = v->GetBool(); + else if (v->IsObject()) + schemaDocument->CreateSchema(&additionalPropertiesSchema_, p.Append(GetAdditionalPropertiesString(), allocator_), *v, document); + } + + AssignIfExist(minProperties_, value, GetMinPropertiesString()); + AssignIfExist(maxProperties_, value, GetMaxPropertiesString()); + + // Array + if (const ValueType* v = GetMember(value, GetItemsString())) { + PointerType q = p.Append(GetItemsString(), allocator_); + if (v->IsObject()) // List validation + schemaDocument->CreateSchema(&itemsList_, q, *v, document); + else if (v->IsArray()) { // Tuple validation + itemsTuple_ = static_cast(allocator_->Malloc(sizeof(const Schema*) * v->Size())); + SizeType index = 0; + for (ConstValueIterator itr = v->Begin(); itr != v->End(); ++itr, index++) + schemaDocument->CreateSchema(&itemsTuple_[itemsTupleCount_++], q.Append(index, allocator_), *itr, document); + } + } + + AssignIfExist(minItems_, value, GetMinItemsString()); + AssignIfExist(maxItems_, value, GetMaxItemsString()); + + if (const ValueType* v = GetMember(value, GetAdditionalItemsString())) { + if (v->IsBool()) + additionalItems_ = v->GetBool(); + else if (v->IsObject()) + schemaDocument->CreateSchema(&additionalItemsSchema_, p.Append(GetAdditionalItemsString(), allocator_), *v, document); + } + + AssignIfExist(uniqueItems_, value, GetUniqueItemsString()); + + // String + AssignIfExist(minLength_, value, GetMinLengthString()); + AssignIfExist(maxLength_, value, GetMaxLengthString()); + + if (const ValueType* v = GetMember(value, GetPatternString())) + pattern_ = CreatePattern(*v); + + // Number + if (const ValueType* v = GetMember(value, GetMinimumString())) + if (v->IsNumber()) + minimum_.CopyFrom(*v, *allocator_); + + if (const ValueType* v = GetMember(value, GetMaximumString())) + if (v->IsNumber()) + maximum_.CopyFrom(*v, *allocator_); + + AssignIfExist(exclusiveMinimum_, value, GetExclusiveMinimumString()); + AssignIfExist(exclusiveMaximum_, value, GetExclusiveMaximumString()); + + if (const ValueType* v = GetMember(value, GetMultipleOfString())) + if (v->IsNumber() && v->GetDouble() > 0.0) + multipleOf_.CopyFrom(*v, *allocator_); + } + + ~Schema() { + if (allocator_) { + allocator_->Free(enum_); + } + if (properties_) { + for (SizeType i = 0; i < propertyCount_; i++) + properties_[i].~Property(); + AllocatorType::Free(properties_); + } + if (patternProperties_) { + for (SizeType i = 0; i < patternPropertyCount_; i++) + patternProperties_[i].~PatternProperty(); + 
AllocatorType::Free(patternProperties_); + } + AllocatorType::Free(itemsTuple_); +#if RAPIDJSON_SCHEMA_HAS_REGEX + if (pattern_) { + pattern_->~RegexType(); + allocator_->Free(pattern_); + } +#endif + } + + bool BeginValue(Context& context) const { + if (context.inArray) { + if (uniqueItems_) + context.valueUniqueness = true; + + if (itemsList_) + context.valueSchema = itemsList_; + else if (itemsTuple_) { + if (context.arrayElementIndex < itemsTupleCount_) + context.valueSchema = itemsTuple_[context.arrayElementIndex]; + else if (additionalItemsSchema_) + context.valueSchema = additionalItemsSchema_; + else if (additionalItems_) + context.valueSchema = GetTypeless(); + else + RAPIDJSON_INVALID_KEYWORD_RETURN(GetItemsString()); + } + else + context.valueSchema = GetTypeless(); + + context.arrayElementIndex++; + } + return true; + } + + RAPIDJSON_FORCEINLINE bool EndValue(Context& context) const { + if (context.patternPropertiesValidatorCount > 0) { + bool otherValid = false; + SizeType count = context.patternPropertiesValidatorCount; + if (context.objectPatternValidatorType != Context::kPatternValidatorOnly) + otherValid = context.patternPropertiesValidators[--count]->IsValid(); + + bool patternValid = true; + for (SizeType i = 0; i < count; i++) + if (!context.patternPropertiesValidators[i]->IsValid()) { + patternValid = false; + break; + } + + if (context.objectPatternValidatorType == Context::kPatternValidatorOnly) { + if (!patternValid) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetPatternPropertiesString()); + } + else if (context.objectPatternValidatorType == Context::kPatternValidatorWithProperty) { + if (!patternValid || !otherValid) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetPatternPropertiesString()); + } + else if (!patternValid && !otherValid) // kPatternValidatorWithAdditionalProperty) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetPatternPropertiesString()); + } + + if (enum_) { + const uint64_t h = context.factory.GetHashCode(context.hasher); + for (SizeType i = 0; i < enumCount_; i++) + if (enum_[i] == h) + goto foundEnum; + RAPIDJSON_INVALID_KEYWORD_RETURN(GetEnumString()); + foundEnum:; + } + + if (allOf_.schemas) + for (SizeType i = allOf_.begin; i < allOf_.begin + allOf_.count; i++) + if (!context.validators[i]->IsValid()) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetAllOfString()); + + if (anyOf_.schemas) { + for (SizeType i = anyOf_.begin; i < anyOf_.begin + anyOf_.count; i++) + if (context.validators[i]->IsValid()) + goto foundAny; + RAPIDJSON_INVALID_KEYWORD_RETURN(GetAnyOfString()); + foundAny:; + } + + if (oneOf_.schemas) { + bool oneValid = false; + for (SizeType i = oneOf_.begin; i < oneOf_.begin + oneOf_.count; i++) + if (context.validators[i]->IsValid()) { + if (oneValid) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetOneOfString()); + else + oneValid = true; + } + if (!oneValid) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetOneOfString()); + } + + if (not_ && context.validators[notValidatorIndex_]->IsValid()) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetNotString()); + + return true; + } + + bool Null(Context& context) const { + if (!(type_ & (1 << kNullSchemaType))) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetTypeString()); + return CreateParallelValidator(context); + } + + bool Bool(Context& context, bool) const { + if (!(type_ & (1 << kBooleanSchemaType))) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetTypeString()); + return CreateParallelValidator(context); + } + + bool Int(Context& context, int i) const { + if (!CheckInt(context, i)) + return false; + return CreateParallelValidator(context); + } + + bool Uint(Context& 
context, unsigned u) const { + if (!CheckUint(context, u)) + return false; + return CreateParallelValidator(context); + } + + bool Int64(Context& context, int64_t i) const { + if (!CheckInt(context, i)) + return false; + return CreateParallelValidator(context); + } + + bool Uint64(Context& context, uint64_t u) const { + if (!CheckUint(context, u)) + return false; + return CreateParallelValidator(context); + } + + bool Double(Context& context, double d) const { + if (!(type_ & (1 << kNumberSchemaType))) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetTypeString()); + + if (!minimum_.IsNull() && !CheckDoubleMinimum(context, d)) + return false; + + if (!maximum_.IsNull() && !CheckDoubleMaximum(context, d)) + return false; + + if (!multipleOf_.IsNull() && !CheckDoubleMultipleOf(context, d)) + return false; + + return CreateParallelValidator(context); + } + + bool String(Context& context, const Ch* str, SizeType length, bool) const { + if (!(type_ & (1 << kStringSchemaType))) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetTypeString()); + + if (minLength_ != 0 || maxLength_ != SizeType(~0)) { + SizeType count; + if (internal::CountStringCodePoint(str, length, &count)) { + if (count < minLength_) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMinLengthString()); + if (count > maxLength_) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMaxLengthString()); + } + } + + if (pattern_ && !IsPatternMatch(pattern_, str, length)) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetPatternString()); + + return CreateParallelValidator(context); + } + + bool StartObject(Context& context) const { + if (!(type_ & (1 << kObjectSchemaType))) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetTypeString()); + + if (hasDependencies_ || hasRequired_) { + context.propertyExist = static_cast(context.factory.MallocState(sizeof(bool) * propertyCount_)); + std::memset(context.propertyExist, 0, sizeof(bool) * propertyCount_); + } + + if (patternProperties_) { // pre-allocate schema array + SizeType count = patternPropertyCount_ + 1; // extra for valuePatternValidatorType + context.patternPropertiesSchemas = static_cast(context.factory.MallocState(sizeof(const SchemaType*) * count)); + context.patternPropertiesSchemaCount = 0; + std::memset(context.patternPropertiesSchemas, 0, sizeof(SchemaType*) * count); + } + + return CreateParallelValidator(context); + } + + bool Key(Context& context, const Ch* str, SizeType len, bool) const { + if (patternProperties_) { + context.patternPropertiesSchemaCount = 0; + for (SizeType i = 0; i < patternPropertyCount_; i++) + if (patternProperties_[i].pattern && IsPatternMatch(patternProperties_[i].pattern, str, len)) + context.patternPropertiesSchemas[context.patternPropertiesSchemaCount++] = patternProperties_[i].schema; + } + + SizeType index; + if (FindPropertyIndex(ValueType(str, len).Move(), &index)) { + if (context.patternPropertiesSchemaCount > 0) { + context.patternPropertiesSchemas[context.patternPropertiesSchemaCount++] = properties_[index].schema; + context.valueSchema = GetTypeless(); + context.valuePatternValidatorType = Context::kPatternValidatorWithProperty; + } + else + context.valueSchema = properties_[index].schema; + + if (context.propertyExist) + context.propertyExist[index] = true; + + return true; + } + + if (additionalPropertiesSchema_) { + if (additionalPropertiesSchema_ && context.patternPropertiesSchemaCount > 0) { + context.patternPropertiesSchemas[context.patternPropertiesSchemaCount++] = additionalPropertiesSchema_; + context.valueSchema = GetTypeless(); + context.valuePatternValidatorType = 
Context::kPatternValidatorWithAdditionalProperty; + } + else + context.valueSchema = additionalPropertiesSchema_; + return true; + } + else if (additionalProperties_) { + context.valueSchema = GetTypeless(); + return true; + } + + if (context.patternPropertiesSchemaCount == 0) // patternProperties are not additional properties + RAPIDJSON_INVALID_KEYWORD_RETURN(GetAdditionalPropertiesString()); + + return true; + } + + bool EndObject(Context& context, SizeType memberCount) const { + if (hasRequired_) + for (SizeType index = 0; index < propertyCount_; index++) + if (properties_[index].required) + if (!context.propertyExist[index]) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetRequiredString()); + + if (memberCount < minProperties_) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMinPropertiesString()); + + if (memberCount > maxProperties_) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMaxPropertiesString()); + + if (hasDependencies_) { + for (SizeType sourceIndex = 0; sourceIndex < propertyCount_; sourceIndex++) + if (context.propertyExist[sourceIndex]) { + if (properties_[sourceIndex].dependencies) { + for (SizeType targetIndex = 0; targetIndex < propertyCount_; targetIndex++) + if (properties_[sourceIndex].dependencies[targetIndex] && !context.propertyExist[targetIndex]) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetDependenciesString()); + } + else if (properties_[sourceIndex].dependenciesSchema) + if (!context.validators[properties_[sourceIndex].dependenciesValidatorIndex]->IsValid()) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetDependenciesString()); + } + } + + return true; + } + + bool StartArray(Context& context) const { + if (!(type_ & (1 << kArraySchemaType))) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetTypeString()); + + context.arrayElementIndex = 0; + context.inArray = true; + + return CreateParallelValidator(context); + } + + bool EndArray(Context& context, SizeType elementCount) const { + context.inArray = false; + + if (elementCount < minItems_) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMinItemsString()); + + if (elementCount > maxItems_) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMaxItemsString()); + + return true; + } + + // Generate functions for string literal according to Ch +#define RAPIDJSON_STRING_(name, ...) 
\ + static const ValueType& Get##name##String() {\ + static const Ch s[] = { __VA_ARGS__, '\0' };\ + static const ValueType v(s, sizeof(s) / sizeof(Ch) - 1);\ + return v;\ + } + + RAPIDJSON_STRING_(Null, 'n', 'u', 'l', 'l') + RAPIDJSON_STRING_(Boolean, 'b', 'o', 'o', 'l', 'e', 'a', 'n') + RAPIDJSON_STRING_(Object, 'o', 'b', 'j', 'e', 'c', 't') + RAPIDJSON_STRING_(Array, 'a', 'r', 'r', 'a', 'y') + RAPIDJSON_STRING_(String, 's', 't', 'r', 'i', 'n', 'g') + RAPIDJSON_STRING_(Number, 'n', 'u', 'm', 'b', 'e', 'r') + RAPIDJSON_STRING_(Integer, 'i', 'n', 't', 'e', 'g', 'e', 'r') + RAPIDJSON_STRING_(Type, 't', 'y', 'p', 'e') + RAPIDJSON_STRING_(Enum, 'e', 'n', 'u', 'm') + RAPIDJSON_STRING_(AllOf, 'a', 'l', 'l', 'O', 'f') + RAPIDJSON_STRING_(AnyOf, 'a', 'n', 'y', 'O', 'f') + RAPIDJSON_STRING_(OneOf, 'o', 'n', 'e', 'O', 'f') + RAPIDJSON_STRING_(Not, 'n', 'o', 't') + RAPIDJSON_STRING_(Properties, 'p', 'r', 'o', 'p', 'e', 'r', 't', 'i', 'e', 's') + RAPIDJSON_STRING_(Required, 'r', 'e', 'q', 'u', 'i', 'r', 'e', 'd') + RAPIDJSON_STRING_(Dependencies, 'd', 'e', 'p', 'e', 'n', 'd', 'e', 'n', 'c', 'i', 'e', 's') + RAPIDJSON_STRING_(PatternProperties, 'p', 'a', 't', 't', 'e', 'r', 'n', 'P', 'r', 'o', 'p', 'e', 'r', 't', 'i', 'e', 's') + RAPIDJSON_STRING_(AdditionalProperties, 'a', 'd', 'd', 'i', 't', 'i', 'o', 'n', 'a', 'l', 'P', 'r', 'o', 'p', 'e', 'r', 't', 'i', 'e', 's') + RAPIDJSON_STRING_(MinProperties, 'm', 'i', 'n', 'P', 'r', 'o', 'p', 'e', 'r', 't', 'i', 'e', 's') + RAPIDJSON_STRING_(MaxProperties, 'm', 'a', 'x', 'P', 'r', 'o', 'p', 'e', 'r', 't', 'i', 'e', 's') + RAPIDJSON_STRING_(Items, 'i', 't', 'e', 'm', 's') + RAPIDJSON_STRING_(MinItems, 'm', 'i', 'n', 'I', 't', 'e', 'm', 's') + RAPIDJSON_STRING_(MaxItems, 'm', 'a', 'x', 'I', 't', 'e', 'm', 's') + RAPIDJSON_STRING_(AdditionalItems, 'a', 'd', 'd', 'i', 't', 'i', 'o', 'n', 'a', 'l', 'I', 't', 'e', 'm', 's') + RAPIDJSON_STRING_(UniqueItems, 'u', 'n', 'i', 'q', 'u', 'e', 'I', 't', 'e', 'm', 's') + RAPIDJSON_STRING_(MinLength, 'm', 'i', 'n', 'L', 'e', 'n', 'g', 't', 'h') + RAPIDJSON_STRING_(MaxLength, 'm', 'a', 'x', 'L', 'e', 'n', 'g', 't', 'h') + RAPIDJSON_STRING_(Pattern, 'p', 'a', 't', 't', 'e', 'r', 'n') + RAPIDJSON_STRING_(Minimum, 'm', 'i', 'n', 'i', 'm', 'u', 'm') + RAPIDJSON_STRING_(Maximum, 'm', 'a', 'x', 'i', 'm', 'u', 'm') + RAPIDJSON_STRING_(ExclusiveMinimum, 'e', 'x', 'c', 'l', 'u', 's', 'i', 'v', 'e', 'M', 'i', 'n', 'i', 'm', 'u', 'm') + RAPIDJSON_STRING_(ExclusiveMaximum, 'e', 'x', 'c', 'l', 'u', 's', 'i', 'v', 'e', 'M', 'a', 'x', 'i', 'm', 'u', 'm') + RAPIDJSON_STRING_(MultipleOf, 'm', 'u', 'l', 't', 'i', 'p', 'l', 'e', 'O', 'f') + +#undef RAPIDJSON_STRING_ + +private: + enum SchemaValueType { + kNullSchemaType, + kBooleanSchemaType, + kObjectSchemaType, + kArraySchemaType, + kStringSchemaType, + kNumberSchemaType, + kIntegerSchemaType, + kTotalSchemaType + }; + +#if RAPIDJSON_SCHEMA_USE_INTERNALREGEX + typedef internal::GenericRegex RegexType; +#elif RAPIDJSON_SCHEMA_USE_STDREGEX + typedef std::basic_regex RegexType; +#else + typedef char RegexType; +#endif + + struct SchemaArray { + SchemaArray() : schemas(), count() {} + ~SchemaArray() { AllocatorType::Free(schemas); } + const SchemaType** schemas; + SizeType begin; // begin index of context.validators + SizeType count; + }; + + static const SchemaType* GetTypeless() { + static SchemaType typeless(0, PointerType(), ValueType(kObjectType).Move(), ValueType(kObjectType).Move(), 0); + return &typeless; + } + + template + void AddUniqueElement(V1& a, const V2& v) { + for (typename 
V1::ConstValueIterator itr = a.Begin(); itr != a.End(); ++itr) + if (*itr == v) + return; + V1 c(v, *allocator_); + a.PushBack(c, *allocator_); + } + + static const ValueType* GetMember(const ValueType& value, const ValueType& name) { + typename ValueType::ConstMemberIterator itr = value.FindMember(name); + return itr != value.MemberEnd() ? &(itr->value) : 0; + } + + static void AssignIfExist(bool& out, const ValueType& value, const ValueType& name) { + if (const ValueType* v = GetMember(value, name)) + if (v->IsBool()) + out = v->GetBool(); + } + + static void AssignIfExist(SizeType& out, const ValueType& value, const ValueType& name) { + if (const ValueType* v = GetMember(value, name)) + if (v->IsUint64() && v->GetUint64() <= SizeType(~0)) + out = static_cast(v->GetUint64()); + } + + void AssignIfExist(SchemaArray& out, SchemaDocumentType& schemaDocument, const PointerType& p, const ValueType& value, const ValueType& name, const ValueType& document) { + if (const ValueType* v = GetMember(value, name)) { + if (v->IsArray() && v->Size() > 0) { + PointerType q = p.Append(name, allocator_); + out.count = v->Size(); + out.schemas = static_cast(allocator_->Malloc(out.count * sizeof(const Schema*))); + memset(out.schemas, 0, sizeof(Schema*)* out.count); + for (SizeType i = 0; i < out.count; i++) + schemaDocument.CreateSchema(&out.schemas[i], q.Append(i, allocator_), (*v)[i], document); + out.begin = validatorCount_; + validatorCount_ += out.count; + } + } + } + +#if RAPIDJSON_SCHEMA_USE_INTERNALREGEX + template + RegexType* CreatePattern(const ValueType& value) { + if (value.IsString()) { + RegexType* r = new (allocator_->Malloc(sizeof(RegexType))) RegexType(value.GetString()); + if (!r->IsValid()) { + r->~RegexType(); + AllocatorType::Free(r); + r = 0; + } + return r; + } + return 0; + } + + static bool IsPatternMatch(const RegexType* pattern, const Ch *str, SizeType) { + return pattern->Search(str); + } +#elif RAPIDJSON_SCHEMA_USE_STDREGEX + template + RegexType* CreatePattern(const ValueType& value) { + if (value.IsString()) + try { + return new (allocator_->Malloc(sizeof(RegexType))) RegexType(value.GetString(), std::size_t(value.GetStringLength()), std::regex_constants::ECMAScript); + } + catch (const std::regex_error&) { + } + return 0; + } + + static bool IsPatternMatch(const RegexType* pattern, const Ch *str, SizeType length) { + std::match_results r; + return std::regex_search(str, str + length, r, *pattern); + } +#else + template + RegexType* CreatePattern(const ValueType&) { return 0; } + + static bool IsPatternMatch(const RegexType*, const Ch *, SizeType) { return true; } +#endif // RAPIDJSON_SCHEMA_USE_STDREGEX + + void AddType(const ValueType& type) { + if (type == GetNullString() ) type_ |= 1 << kNullSchemaType; + else if (type == GetBooleanString()) type_ |= 1 << kBooleanSchemaType; + else if (type == GetObjectString() ) type_ |= 1 << kObjectSchemaType; + else if (type == GetArrayString() ) type_ |= 1 << kArraySchemaType; + else if (type == GetStringString() ) type_ |= 1 << kStringSchemaType; + else if (type == GetIntegerString()) type_ |= 1 << kIntegerSchemaType; + else if (type == GetNumberString() ) type_ |= (1 << kNumberSchemaType) | (1 << kIntegerSchemaType); + } + + bool CreateParallelValidator(Context& context) const { + if (enum_ || context.arrayUniqueness) + context.hasher = context.factory.CreateHasher(); + + if (validatorCount_) { + RAPIDJSON_ASSERT(context.validators == 0); + context.validators = static_cast(context.factory.MallocState(sizeof(ISchemaValidator*) * 
validatorCount_)); + context.validatorCount = validatorCount_; + + if (allOf_.schemas) + CreateSchemaValidators(context, allOf_); + + if (anyOf_.schemas) + CreateSchemaValidators(context, anyOf_); + + if (oneOf_.schemas) + CreateSchemaValidators(context, oneOf_); + + if (not_) + context.validators[notValidatorIndex_] = context.factory.CreateSchemaValidator(*not_); + + if (hasSchemaDependencies_) { + for (SizeType i = 0; i < propertyCount_; i++) + if (properties_[i].dependenciesSchema) + context.validators[properties_[i].dependenciesValidatorIndex] = context.factory.CreateSchemaValidator(*properties_[i].dependenciesSchema); + } + } + + return true; + } + + void CreateSchemaValidators(Context& context, const SchemaArray& schemas) const { + for (SizeType i = 0; i < schemas.count; i++) + context.validators[schemas.begin + i] = context.factory.CreateSchemaValidator(*schemas.schemas[i]); + } + + // O(n) + bool FindPropertyIndex(const ValueType& name, SizeType* outIndex) const { + SizeType len = name.GetStringLength(); + const Ch* str = name.GetString(); + for (SizeType index = 0; index < propertyCount_; index++) + if (properties_[index].name.GetStringLength() == len && + (std::memcmp(properties_[index].name.GetString(), str, sizeof(Ch) * len) == 0)) + { + *outIndex = index; + return true; + } + return false; + } + + bool CheckInt(Context& context, int64_t i) const { + if (!(type_ & ((1 << kIntegerSchemaType) | (1 << kNumberSchemaType)))) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetTypeString()); + + if (!minimum_.IsNull()) { + if (minimum_.IsInt64()) { + if (exclusiveMinimum_ ? i <= minimum_.GetInt64() : i < minimum_.GetInt64()) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMinimumString()); + } + else if (minimum_.IsUint64()) { + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMinimumString()); // i <= max(int64_t) < minimum.GetUint64() + } + else if (!CheckDoubleMinimum(context, static_cast(i))) + return false; + } + + if (!maximum_.IsNull()) { + if (maximum_.IsInt64()) { + if (exclusiveMaximum_ ? i >= maximum_.GetInt64() : i > maximum_.GetInt64()) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMaximumString()); + } + else if (maximum_.IsUint64()) + /* do nothing */; // i <= max(int64_t) < maximum_.GetUint64() + else if (!CheckDoubleMaximum(context, static_cast(i))) + return false; + } + + if (!multipleOf_.IsNull()) { + if (multipleOf_.IsUint64()) { + if (static_cast(i >= 0 ? i : -i) % multipleOf_.GetUint64() != 0) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMultipleOfString()); + } + else if (!CheckDoubleMultipleOf(context, static_cast(i))) + return false; + } + + return true; + } + + bool CheckUint(Context& context, uint64_t i) const { + if (!(type_ & ((1 << kIntegerSchemaType) | (1 << kNumberSchemaType)))) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetTypeString()); + + if (!minimum_.IsNull()) { + if (minimum_.IsUint64()) { + if (exclusiveMinimum_ ? i <= minimum_.GetUint64() : i < minimum_.GetUint64()) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMinimumString()); + } + else if (minimum_.IsInt64()) + /* do nothing */; // i >= 0 > minimum.Getint64() + else if (!CheckDoubleMinimum(context, static_cast(i))) + return false; + } + + if (!maximum_.IsNull()) { + if (maximum_.IsUint64()) { + if (exclusiveMaximum_ ? 
i >= maximum_.GetUint64() : i > maximum_.GetUint64()) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMaximumString()); + } + else if (maximum_.IsInt64()) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMaximumString()); // i >= 0 > maximum_ + else if (!CheckDoubleMaximum(context, static_cast(i))) + return false; + } + + if (!multipleOf_.IsNull()) { + if (multipleOf_.IsUint64()) { + if (i % multipleOf_.GetUint64() != 0) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMultipleOfString()); + } + else if (!CheckDoubleMultipleOf(context, static_cast(i))) + return false; + } + + return true; + } + + bool CheckDoubleMinimum(Context& context, double d) const { + if (exclusiveMinimum_ ? d <= minimum_.GetDouble() : d < minimum_.GetDouble()) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMinimumString()); + return true; + } + + bool CheckDoubleMaximum(Context& context, double d) const { + if (exclusiveMaximum_ ? d >= maximum_.GetDouble() : d > maximum_.GetDouble()) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMaximumString()); + return true; + } + + bool CheckDoubleMultipleOf(Context& context, double d) const { + double a = std::abs(d), b = std::abs(multipleOf_.GetDouble()); + double q = std::floor(a / b); + double r = a - q * b; + if (r > 0.0) + RAPIDJSON_INVALID_KEYWORD_RETURN(GetMultipleOfString()); + return true; + } + + struct Property { + Property() : schema(), dependenciesSchema(), dependenciesValidatorIndex(), dependencies(), required(false) {} + ~Property() { AllocatorType::Free(dependencies); } + SValue name; + const SchemaType* schema; + const SchemaType* dependenciesSchema; + SizeType dependenciesValidatorIndex; + bool* dependencies; + bool required; + }; + + struct PatternProperty { + PatternProperty() : schema(), pattern() {} + ~PatternProperty() { + if (pattern) { + pattern->~RegexType(); + AllocatorType::Free(pattern); + } + } + const SchemaType* schema; + RegexType* pattern; + }; + + AllocatorType* allocator_; + uint64_t* enum_; + SizeType enumCount_; + SchemaArray allOf_; + SchemaArray anyOf_; + SchemaArray oneOf_; + const SchemaType* not_; + unsigned type_; // bitmask of kSchemaType + SizeType validatorCount_; + SizeType notValidatorIndex_; + + Property* properties_; + const SchemaType* additionalPropertiesSchema_; + PatternProperty* patternProperties_; + SizeType patternPropertyCount_; + SizeType propertyCount_; + SizeType minProperties_; + SizeType maxProperties_; + bool additionalProperties_; + bool hasDependencies_; + bool hasRequired_; + bool hasSchemaDependencies_; + + const SchemaType* additionalItemsSchema_; + const SchemaType* itemsList_; + const SchemaType** itemsTuple_; + SizeType itemsTupleCount_; + SizeType minItems_; + SizeType maxItems_; + bool additionalItems_; + bool uniqueItems_; + + RegexType* pattern_; + SizeType minLength_; + SizeType maxLength_; + + SValue minimum_; + SValue maximum_; + SValue multipleOf_; + bool exclusiveMinimum_; + bool exclusiveMaximum_; +}; + +template +struct TokenHelper { + RAPIDJSON_FORCEINLINE static void AppendIndexToken(Stack& documentStack, SizeType index) { + *documentStack.template Push() = '/'; + char buffer[21]; + size_t length = static_cast((sizeof(SizeType) == 4 ? u32toa(index, buffer) : u64toa(index, buffer)) - buffer); + for (size_t i = 0; i < length; i++) + *documentStack.template Push() = buffer[i]; + } +}; + +// Partial specialized version for char to prevent buffer copying. 
+template +struct TokenHelper { + RAPIDJSON_FORCEINLINE static void AppendIndexToken(Stack& documentStack, SizeType index) { + if (sizeof(SizeType) == 4) { + char *buffer = documentStack.template Push(1 + 10); // '/' + uint + *buffer++ = '/'; + const char* end = internal::u32toa(index, buffer); + documentStack.template Pop(static_cast(10 - (end - buffer))); + } + else { + char *buffer = documentStack.template Push(1 + 20); // '/' + uint64 + *buffer++ = '/'; + const char* end = internal::u64toa(index, buffer); + documentStack.template Pop(static_cast(20 - (end - buffer))); + } + } +}; + +} // namespace internal + +/////////////////////////////////////////////////////////////////////////////// +// IGenericRemoteSchemaDocumentProvider + +template +class IGenericRemoteSchemaDocumentProvider { +public: + typedef typename SchemaDocumentType::Ch Ch; + + virtual ~IGenericRemoteSchemaDocumentProvider() {} + virtual const SchemaDocumentType* GetRemoteDocument(const Ch* uri, SizeType length) = 0; +}; + +/////////////////////////////////////////////////////////////////////////////// +// GenericSchemaDocument + +//! JSON schema document. +/*! + A JSON schema document is a compiled version of a JSON schema. + It is basically a tree of internal::Schema. + + \note This is an immutable class (i.e. its instance cannot be modified after construction). + \tparam ValueT Type of JSON value (e.g. \c Value ), which also determine the encoding. + \tparam Allocator Allocator type for allocating memory of this document. +*/ +template +class GenericSchemaDocument { +public: + typedef ValueT ValueType; + typedef IGenericRemoteSchemaDocumentProvider IRemoteSchemaDocumentProviderType; + typedef Allocator AllocatorType; + typedef typename ValueType::EncodingType EncodingType; + typedef typename EncodingType::Ch Ch; + typedef internal::Schema SchemaType; + typedef GenericPointer PointerType; + friend class internal::Schema; + template + friend class GenericSchemaValidator; + + //! Constructor. + /*! + Compile a JSON document into schema document. + + \param document A JSON document as source. + \param remoteProvider An optional remote schema document provider for resolving remote reference. Can be null. + \param allocator An optional allocator instance for allocating memory. Can be null. + */ + explicit GenericSchemaDocument(const ValueType& document, IRemoteSchemaDocumentProviderType* remoteProvider = 0, Allocator* allocator = 0) : + remoteProvider_(remoteProvider), + allocator_(allocator), + ownAllocator_(), + root_(), + schemaMap_(allocator, kInitialSchemaMapSize), + schemaRef_(allocator, kInitialSchemaRefSize) + { + if (!allocator_) + ownAllocator_ = allocator_ = RAPIDJSON_NEW(Allocator()); + + // Generate root schema, it will call CreateSchema() to create sub-schemas, + // And call AddRefSchema() if there are $ref. + CreateSchemaRecursive(&root_, PointerType(), document, document); + + // Resolve $ref + while (!schemaRef_.Empty()) { + SchemaRefEntry* refEntry = schemaRef_.template Pop(1); + if (const SchemaType* s = GetSchema(refEntry->target)) { + if (refEntry->schema) + *refEntry->schema = s; + + // Create entry in map if not exist + if (!GetSchema(refEntry->source)) { + new (schemaMap_.template Push()) SchemaEntry(refEntry->source, const_cast(s), false, allocator_); + } + } + refEntry->~SchemaRefEntry(); + } + + RAPIDJSON_ASSERT(root_ != 0); + + schemaRef_.ShrinkToFit(); // Deallocate all memory for ref + } + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + //! 
Move constructor in C++11 + GenericSchemaDocument(GenericSchemaDocument&& rhs) RAPIDJSON_NOEXCEPT : + remoteProvider_(rhs.remoteProvider_), + allocator_(rhs.allocator_), + ownAllocator_(rhs.ownAllocator_), + root_(rhs.root_), + schemaMap_(std::move(rhs.schemaMap_)), + schemaRef_(std::move(rhs.schemaRef_)) + { + rhs.remoteProvider_ = 0; + rhs.allocator_ = 0; + rhs.ownAllocator_ = 0; + } +#endif + + //! Destructor + ~GenericSchemaDocument() { + while (!schemaMap_.Empty()) + schemaMap_.template Pop(1)->~SchemaEntry(); + + RAPIDJSON_DELETE(ownAllocator_); + } + + //! Get the root schema. + const SchemaType& GetRoot() const { return *root_; } + +private: + //! Prohibit copying + GenericSchemaDocument(const GenericSchemaDocument&); + //! Prohibit assignment + GenericSchemaDocument& operator=(const GenericSchemaDocument&); + + struct SchemaRefEntry { + SchemaRefEntry(const PointerType& s, const PointerType& t, const SchemaType** outSchema, Allocator *allocator) : source(s, allocator), target(t, allocator), schema(outSchema) {} + PointerType source; + PointerType target; + const SchemaType** schema; + }; + + struct SchemaEntry { + SchemaEntry(const PointerType& p, SchemaType* s, bool o, Allocator* allocator) : pointer(p, allocator), schema(s), owned(o) {} + ~SchemaEntry() { + if (owned) { + schema->~SchemaType(); + Allocator::Free(schema); + } + } + PointerType pointer; + SchemaType* schema; + bool owned; + }; + + void CreateSchemaRecursive(const SchemaType** schema, const PointerType& pointer, const ValueType& v, const ValueType& document) { + if (schema) + *schema = SchemaType::GetTypeless(); + + if (v.GetType() == kObjectType) { + const SchemaType* s = GetSchema(pointer); + if (!s) + CreateSchema(schema, pointer, v, document); + + for (typename ValueType::ConstMemberIterator itr = v.MemberBegin(); itr != v.MemberEnd(); ++itr) + CreateSchemaRecursive(0, pointer.Append(itr->name, allocator_), itr->value, document); + } + else if (v.GetType() == kArrayType) + for (SizeType i = 0; i < v.Size(); i++) + CreateSchemaRecursive(0, pointer.Append(i, allocator_), v[i], document); + } + + void CreateSchema(const SchemaType** schema, const PointerType& pointer, const ValueType& v, const ValueType& document) { + RAPIDJSON_ASSERT(pointer.IsValid()); + if (v.IsObject()) { + if (!HandleRefSchema(pointer, schema, v, document)) { + SchemaType* s = new (allocator_->Malloc(sizeof(SchemaType))) SchemaType(this, pointer, v, document, allocator_); + new (schemaMap_.template Push()) SchemaEntry(pointer, s, true, allocator_); + if (schema) + *schema = s; + } + } + } + + bool HandleRefSchema(const PointerType& source, const SchemaType** schema, const ValueType& v, const ValueType& document) { + static const Ch kRefString[] = { '$', 'r', 'e', 'f', '\0' }; + static const ValueType kRefValue(kRefString, 4); + + typename ValueType::ConstMemberIterator itr = v.FindMember(kRefValue); + if (itr == v.MemberEnd()) + return false; + + if (itr->value.IsString()) { + SizeType len = itr->value.GetStringLength(); + if (len > 0) { + const Ch* s = itr->value.GetString(); + SizeType i = 0; + while (i < len && s[i] != '#') // Find the first # + i++; + + if (i > 0) { // Remote reference, resolve immediately + if (remoteProvider_) { + if (const GenericSchemaDocument* remoteDocument = remoteProvider_->GetRemoteDocument(s, i - 1)) { + PointerType pointer(&s[i], len - i, allocator_); + if (pointer.IsValid()) { + if (const SchemaType* sc = remoteDocument->GetSchema(pointer)) { + if (schema) + *schema = sc; + return true; + } + } + } + } + } + 
else if (s[i] == '#') { // Local reference, defer resolution + PointerType pointer(&s[i], len - i, allocator_); + if (pointer.IsValid()) { + if (const ValueType* nv = pointer.Get(document)) + if (HandleRefSchema(source, schema, *nv, document)) + return true; + + new (schemaRef_.template Push()) SchemaRefEntry(source, pointer, schema, allocator_); + return true; + } + } + } + } + return false; + } + + const SchemaType* GetSchema(const PointerType& pointer) const { + for (const SchemaEntry* target = schemaMap_.template Bottom(); target != schemaMap_.template End(); ++target) + if (pointer == target->pointer) + return target->schema; + return 0; + } + + PointerType GetPointer(const SchemaType* schema) const { + for (const SchemaEntry* target = schemaMap_.template Bottom(); target != schemaMap_.template End(); ++target) + if (schema == target->schema) + return target->pointer; + return PointerType(); + } + + static const size_t kInitialSchemaMapSize = 64; + static const size_t kInitialSchemaRefSize = 64; + + IRemoteSchemaDocumentProviderType* remoteProvider_; + Allocator *allocator_; + Allocator *ownAllocator_; + const SchemaType* root_; //!< Root schema. + internal::Stack schemaMap_; // Stores created Pointer -> Schemas + internal::Stack schemaRef_; // Stores Pointer from $ref and schema which holds the $ref +}; + +//! GenericSchemaDocument using Value type. +typedef GenericSchemaDocument SchemaDocument; +//! IGenericRemoteSchemaDocumentProvider using SchemaDocument. +typedef IGenericRemoteSchemaDocumentProvider IRemoteSchemaDocumentProvider; + +/////////////////////////////////////////////////////////////////////////////// +// GenericSchemaValidator + +//! JSON Schema Validator. +/*! + A SAX style JSON schema validator. + It uses a \c GenericSchemaDocument to validate SAX events. + It delegates the incoming SAX events to an output handler. + The default output handler does nothing. + It can be reused multiple times by calling \c Reset(). + + \tparam SchemaDocumentType Type of schema document. + \tparam OutputHandler Type of output handler. Default handler does nothing. + \tparam StateAllocator Allocator for storing the internal validation states. +*/ +template < + typename SchemaDocumentType, + typename OutputHandler = BaseReaderHandler, + typename StateAllocator = CrtAllocator> +class GenericSchemaValidator : + public internal::ISchemaStateFactory, + public internal::ISchemaValidator +{ +public: + typedef typename SchemaDocumentType::SchemaType SchemaType; + typedef typename SchemaDocumentType::PointerType PointerType; + typedef typename SchemaType::EncodingType EncodingType; + typedef typename EncodingType::Ch Ch; + + //! Constructor without output handler. + /*! + \param schemaDocument The schema document to conform to. + \param allocator Optional allocator for storing internal validation states. + \param schemaStackCapacity Optional initial capacity of schema path stack. + \param documentStackCapacity Optional initial capacity of document path stack. 
+ */ + GenericSchemaValidator( + const SchemaDocumentType& schemaDocument, + StateAllocator* allocator = 0, + size_t schemaStackCapacity = kDefaultSchemaStackCapacity, + size_t documentStackCapacity = kDefaultDocumentStackCapacity) + : + schemaDocument_(&schemaDocument), + root_(schemaDocument.GetRoot()), + outputHandler_(GetNullHandler()), + stateAllocator_(allocator), + ownStateAllocator_(0), + schemaStack_(allocator, schemaStackCapacity), + documentStack_(allocator, documentStackCapacity), + valid_(true) +#if RAPIDJSON_SCHEMA_VERBOSE + , depth_(0) +#endif + { + } + + //! Constructor with output handler. + /*! + \param schemaDocument The schema document to conform to. + \param allocator Optional allocator for storing internal validation states. + \param schemaStackCapacity Optional initial capacity of schema path stack. + \param documentStackCapacity Optional initial capacity of document path stack. + */ + GenericSchemaValidator( + const SchemaDocumentType& schemaDocument, + OutputHandler& outputHandler, + StateAllocator* allocator = 0, + size_t schemaStackCapacity = kDefaultSchemaStackCapacity, + size_t documentStackCapacity = kDefaultDocumentStackCapacity) + : + schemaDocument_(&schemaDocument), + root_(schemaDocument.GetRoot()), + outputHandler_(outputHandler), + stateAllocator_(allocator), + ownStateAllocator_(0), + schemaStack_(allocator, schemaStackCapacity), + documentStack_(allocator, documentStackCapacity), + valid_(true) +#if RAPIDJSON_SCHEMA_VERBOSE + , depth_(0) +#endif + { + } + + //! Destructor. + ~GenericSchemaValidator() { + Reset(); + RAPIDJSON_DELETE(ownStateAllocator_); + } + + //! Reset the internal states. + void Reset() { + while (!schemaStack_.Empty()) + PopSchema(); + documentStack_.Clear(); + valid_ = true; + } + + //! Checks whether the current state is valid. + // Implementation of ISchemaValidator + virtual bool IsValid() const { return valid_; } + + //! Gets the JSON pointer pointed to the invalid schema. + PointerType GetInvalidSchemaPointer() const { + return schemaStack_.Empty() ? PointerType() : schemaDocument_->GetPointer(&CurrentSchema()); + } + + //! Gets the keyword of invalid schema. + const Ch* GetInvalidSchemaKeyword() const { + return schemaStack_.Empty() ? 0 : CurrentContext().invalidKeyword; + } + + //! Gets the JSON pointer pointed to the invalid value. + PointerType GetInvalidDocumentPointer() const { + return documentStack_.Empty() ? 
PointerType() : PointerType(documentStack_.template Bottom(), documentStack_.GetSize() / sizeof(Ch)); + } + +#if RAPIDJSON_SCHEMA_VERBOSE +#define RAPIDJSON_SCHEMA_HANDLE_BEGIN_VERBOSE_() \ +RAPIDJSON_MULTILINEMACRO_BEGIN\ + *documentStack_.template Push() = '\0';\ + documentStack_.template Pop(1);\ + internal::PrintInvalidDocument(documentStack_.template Bottom());\ +RAPIDJSON_MULTILINEMACRO_END +#else +#define RAPIDJSON_SCHEMA_HANDLE_BEGIN_VERBOSE_() +#endif + +#define RAPIDJSON_SCHEMA_HANDLE_BEGIN_(method, arg1)\ + if (!valid_) return false; \ + if (!BeginValue() || !CurrentSchema().method arg1) {\ + RAPIDJSON_SCHEMA_HANDLE_BEGIN_VERBOSE_();\ + return valid_ = false;\ + } + +#define RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(method, arg2)\ + for (Context* context = schemaStack_.template Bottom(); context != schemaStack_.template End(); context++) {\ + if (context->hasher)\ + static_cast(context->hasher)->method arg2;\ + if (context->validators)\ + for (SizeType i_ = 0; i_ < context->validatorCount; i_++)\ + static_cast(context->validators[i_])->method arg2;\ + if (context->patternPropertiesValidators)\ + for (SizeType i_ = 0; i_ < context->patternPropertiesValidatorCount; i_++)\ + static_cast(context->patternPropertiesValidators[i_])->method arg2;\ + } + +#define RAPIDJSON_SCHEMA_HANDLE_END_(method, arg2)\ + return valid_ = EndValue() && outputHandler_.method arg2 + +#define RAPIDJSON_SCHEMA_HANDLE_VALUE_(method, arg1, arg2) \ + RAPIDJSON_SCHEMA_HANDLE_BEGIN_ (method, arg1);\ + RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(method, arg2);\ + RAPIDJSON_SCHEMA_HANDLE_END_ (method, arg2) + + bool Null() { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Null, (CurrentContext() ), ( )); } + bool Bool(bool b) { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Bool, (CurrentContext(), b), (b)); } + bool Int(int i) { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Int, (CurrentContext(), i), (i)); } + bool Uint(unsigned u) { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Uint, (CurrentContext(), u), (u)); } + bool Int64(int64_t i) { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Int64, (CurrentContext(), i), (i)); } + bool Uint64(uint64_t u) { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Uint64, (CurrentContext(), u), (u)); } + bool Double(double d) { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Double, (CurrentContext(), d), (d)); } + bool RawNumber(const Ch* str, SizeType length, bool copy) + { RAPIDJSON_SCHEMA_HANDLE_VALUE_(String, (CurrentContext(), str, length, copy), (str, length, copy)); } + bool String(const Ch* str, SizeType length, bool copy) + { RAPIDJSON_SCHEMA_HANDLE_VALUE_(String, (CurrentContext(), str, length, copy), (str, length, copy)); } + + bool StartObject() { + RAPIDJSON_SCHEMA_HANDLE_BEGIN_(StartObject, (CurrentContext())); + RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(StartObject, ()); + return valid_ = outputHandler_.StartObject(); + } + + bool Key(const Ch* str, SizeType len, bool copy) { + if (!valid_) return false; + AppendToken(str, len); + if (!CurrentSchema().Key(CurrentContext(), str, len, copy)) return valid_ = false; + RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(Key, (str, len, copy)); + return valid_ = outputHandler_.Key(str, len, copy); + } + + bool EndObject(SizeType memberCount) { + if (!valid_) return false; + RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(EndObject, (memberCount)); + if (!CurrentSchema().EndObject(CurrentContext(), memberCount)) return valid_ = false; + RAPIDJSON_SCHEMA_HANDLE_END_(EndObject, (memberCount)); + } + + bool StartArray() { + RAPIDJSON_SCHEMA_HANDLE_BEGIN_(StartArray, (CurrentContext())); + RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(StartArray, ()); + return valid_ = outputHandler_.StartArray(); + } 
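+
+    // Usage sketch (illustrative only, not part of the upstream header; the JSON
+    // literals are placeholders): these handler methods are normally driven by a
+    // Reader rather than called directly.
+    //
+    //     Document sd;
+    //     sd.Parse("{\"type\":\"object\",\"required\":[\"id\"]}");
+    //     SchemaDocument schema(sd);          // compile the schema once
+    //     SchemaValidator validator(schema);  // reusable via Reset()
+    //
+    //     Reader reader;
+    //     StringStream ss("{\"id\":42}");
+    //     reader.Parse(ss, validator);        // SAX events flow through the methods above
+    //     if (!validator.IsValid()) {
+    //         // GetInvalidSchemaKeyword() / GetInvalidSchemaPointer() /
+    //         // GetInvalidDocumentPointer() describe the first failure.
+    //     }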
+ + bool EndArray(SizeType elementCount) { + if (!valid_) return false; + RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(EndArray, (elementCount)); + if (!CurrentSchema().EndArray(CurrentContext(), elementCount)) return valid_ = false; + RAPIDJSON_SCHEMA_HANDLE_END_(EndArray, (elementCount)); + } + +#undef RAPIDJSON_SCHEMA_HANDLE_BEGIN_VERBOSE_ +#undef RAPIDJSON_SCHEMA_HANDLE_BEGIN_ +#undef RAPIDJSON_SCHEMA_HANDLE_PARALLEL_ +#undef RAPIDJSON_SCHEMA_HANDLE_VALUE_ + + // Implementation of ISchemaStateFactory + virtual ISchemaValidator* CreateSchemaValidator(const SchemaType& root) { + return new (GetStateAllocator().Malloc(sizeof(GenericSchemaValidator))) GenericSchemaValidator(*schemaDocument_, root, +#if RAPIDJSON_SCHEMA_VERBOSE + depth_ + 1, +#endif + &GetStateAllocator()); + } + + virtual void DestroySchemaValidator(ISchemaValidator* validator) { + GenericSchemaValidator* v = static_cast(validator); + v->~GenericSchemaValidator(); + StateAllocator::Free(v); + } + + virtual void* CreateHasher() { + return new (GetStateAllocator().Malloc(sizeof(HasherType))) HasherType(&GetStateAllocator()); + } + + virtual uint64_t GetHashCode(void* hasher) { + return static_cast(hasher)->GetHashCode(); + } + + virtual void DestroryHasher(void* hasher) { + HasherType* h = static_cast(hasher); + h->~HasherType(); + StateAllocator::Free(h); + } + + virtual void* MallocState(size_t size) { + return GetStateAllocator().Malloc(size); + } + + virtual void FreeState(void* p) { + return StateAllocator::Free(p); + } + +private: + typedef typename SchemaType::Context Context; + typedef GenericValue, StateAllocator> HashCodeArray; + typedef internal::Hasher HasherType; + + GenericSchemaValidator( + const SchemaDocumentType& schemaDocument, + const SchemaType& root, +#if RAPIDJSON_SCHEMA_VERBOSE + unsigned depth, +#endif + StateAllocator* allocator = 0, + size_t schemaStackCapacity = kDefaultSchemaStackCapacity, + size_t documentStackCapacity = kDefaultDocumentStackCapacity) + : + schemaDocument_(&schemaDocument), + root_(root), + outputHandler_(GetNullHandler()), + stateAllocator_(allocator), + ownStateAllocator_(0), + schemaStack_(allocator, schemaStackCapacity), + documentStack_(allocator, documentStackCapacity), + valid_(true) +#if RAPIDJSON_SCHEMA_VERBOSE + , depth_(depth) +#endif + { + } + + StateAllocator& GetStateAllocator() { + if (!stateAllocator_) + stateAllocator_ = ownStateAllocator_ = RAPIDJSON_NEW(StateAllocator()); + return *stateAllocator_; + } + + bool BeginValue() { + if (schemaStack_.Empty()) + PushSchema(root_); + else { + if (CurrentContext().inArray) + internal::TokenHelper, Ch>::AppendIndexToken(documentStack_, CurrentContext().arrayElementIndex); + + if (!CurrentSchema().BeginValue(CurrentContext())) + return false; + + SizeType count = CurrentContext().patternPropertiesSchemaCount; + const SchemaType** sa = CurrentContext().patternPropertiesSchemas; + typename Context::PatternValidatorType patternValidatorType = CurrentContext().valuePatternValidatorType; + bool valueUniqueness = CurrentContext().valueUniqueness; + if (CurrentContext().valueSchema) + PushSchema(*CurrentContext().valueSchema); + + if (count > 0) { + CurrentContext().objectPatternValidatorType = patternValidatorType; + ISchemaValidator**& va = CurrentContext().patternPropertiesValidators; + SizeType& validatorCount = CurrentContext().patternPropertiesValidatorCount; + va = static_cast(MallocState(sizeof(ISchemaValidator*) * count)); + for (SizeType i = 0; i < count; i++) + va[validatorCount++] = CreateSchemaValidator(*sa[i]); + } + + 
CurrentContext().arrayUniqueness = valueUniqueness; + } + return true; + } + + bool EndValue() { + if (!CurrentSchema().EndValue(CurrentContext())) + return false; + +#if RAPIDJSON_SCHEMA_VERBOSE + GenericStringBuffer sb; + schemaDocument_->GetPointer(&CurrentSchema()).Stringify(sb); + + *documentStack_.template Push() = '\0'; + documentStack_.template Pop(1); + internal::PrintValidatorPointers(depth_, sb.GetString(), documentStack_.template Bottom()); +#endif + + uint64_t h = CurrentContext().arrayUniqueness ? static_cast(CurrentContext().hasher)->GetHashCode() : 0; + + PopSchema(); + + if (!schemaStack_.Empty()) { + Context& context = CurrentContext(); + if (context.valueUniqueness) { + HashCodeArray* a = static_cast(context.arrayElementHashCodes); + if (!a) + CurrentContext().arrayElementHashCodes = a = new (GetStateAllocator().Malloc(sizeof(HashCodeArray))) HashCodeArray(kArrayType); + for (typename HashCodeArray::ConstValueIterator itr = a->Begin(); itr != a->End(); ++itr) + if (itr->GetUint64() == h) + RAPIDJSON_INVALID_KEYWORD_RETURN(SchemaType::GetUniqueItemsString()); + a->PushBack(h, GetStateAllocator()); + } + } + + // Remove the last token of document pointer + while (!documentStack_.Empty() && *documentStack_.template Pop(1) != '/') + ; + + return true; + } + + void AppendToken(const Ch* str, SizeType len) { + documentStack_.template Reserve(1 + len * 2); // worst case all characters are escaped as two characters + *documentStack_.template PushUnsafe() = '/'; + for (SizeType i = 0; i < len; i++) { + if (str[i] == '~') { + *documentStack_.template PushUnsafe() = '~'; + *documentStack_.template PushUnsafe() = '0'; + } + else if (str[i] == '/') { + *documentStack_.template PushUnsafe() = '~'; + *documentStack_.template PushUnsafe() = '1'; + } + else + *documentStack_.template PushUnsafe() = str[i]; + } + } + + RAPIDJSON_FORCEINLINE void PushSchema(const SchemaType& schema) { new (schemaStack_.template Push()) Context(*this, &schema); } + + RAPIDJSON_FORCEINLINE void PopSchema() { + Context* c = schemaStack_.template Pop(1); + if (HashCodeArray* a = static_cast(c->arrayElementHashCodes)) { + a->~HashCodeArray(); + StateAllocator::Free(a); + } + c->~Context(); + } + + const SchemaType& CurrentSchema() const { return *schemaStack_.template Top()->schema; } + Context& CurrentContext() { return *schemaStack_.template Top(); } + const Context& CurrentContext() const { return *schemaStack_.template Top(); } + + static OutputHandler& GetNullHandler() { + static OutputHandler nullHandler; + return nullHandler; + } + + static const size_t kDefaultSchemaStackCapacity = 1024; + static const size_t kDefaultDocumentStackCapacity = 256; + const SchemaDocumentType* schemaDocument_; + const SchemaType& root_; + OutputHandler& outputHandler_; + StateAllocator* stateAllocator_; + StateAllocator* ownStateAllocator_; + internal::Stack schemaStack_; //!< stack to store the current path of schema (BaseSchemaType *) + internal::Stack documentStack_; //!< stack to store the current path of validating document (Ch) + bool valid_; +#if RAPIDJSON_SCHEMA_VERBOSE + unsigned depth_; +#endif +}; + +typedef GenericSchemaValidator SchemaValidator; + +/////////////////////////////////////////////////////////////////////////////// +// SchemaValidatingReader + +//! A helper class for parsing with validation. +/*! + This helper class is a functor, designed as a parameter of \ref GenericDocument::Populate(). + + \tparam parseFlags Combination of \ref ParseFlag. 
+ \tparam InputStream Type of input stream, implementing Stream concept. + \tparam SourceEncoding Encoding of the input stream. + \tparam SchemaDocumentType Type of schema document. + \tparam StackAllocator Allocator type for stack. +*/ +template < + unsigned parseFlags, + typename InputStream, + typename SourceEncoding, + typename SchemaDocumentType = SchemaDocument, + typename StackAllocator = CrtAllocator> +class SchemaValidatingReader { +public: + typedef typename SchemaDocumentType::PointerType PointerType; + typedef typename InputStream::Ch Ch; + + //! Constructor + /*! + \param is Input stream. + \param sd Schema document. + */ + SchemaValidatingReader(InputStream& is, const SchemaDocumentType& sd) : is_(is), sd_(sd), invalidSchemaKeyword_(), isValid_(true) {} + + template + bool operator()(Handler& handler) { + GenericReader reader; + GenericSchemaValidator validator(sd_, handler); + parseResult_ = reader.template Parse(is_, validator); + + isValid_ = validator.IsValid(); + if (isValid_) { + invalidSchemaPointer_ = PointerType(); + invalidSchemaKeyword_ = 0; + invalidDocumentPointer_ = PointerType(); + } + else { + invalidSchemaPointer_ = validator.GetInvalidSchemaPointer(); + invalidSchemaKeyword_ = validator.GetInvalidSchemaKeyword(); + invalidDocumentPointer_ = validator.GetInvalidDocumentPointer(); + } + + return parseResult_; + } + + const ParseResult& GetParseResult() const { return parseResult_; } + bool IsValid() const { return isValid_; } + const PointerType& GetInvalidSchemaPointer() const { return invalidSchemaPointer_; } + const Ch* GetInvalidSchemaKeyword() const { return invalidSchemaKeyword_; } + const PointerType& GetInvalidDocumentPointer() const { return invalidDocumentPointer_; } + +private: + InputStream& is_; + const SchemaDocumentType& sd_; + + ParseResult parseResult_; + PointerType invalidSchemaPointer_; + const Ch* invalidSchemaKeyword_; + PointerType invalidDocumentPointer_; + bool isValid_; +}; + +RAPIDJSON_NAMESPACE_END +RAPIDJSON_DIAG_POP + +#endif // RAPIDJSON_SCHEMA_H_ diff --git a/include/rapidjson/stream.h b/include/rapidjson/stream.h new file mode 100644 index 0000000..fef82c2 --- /dev/null +++ b/include/rapidjson/stream.h @@ -0,0 +1,179 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#include "rapidjson.h" + +#ifndef RAPIDJSON_STREAM_H_ +#define RAPIDJSON_STREAM_H_ + +#include "encodings.h" + +RAPIDJSON_NAMESPACE_BEGIN + +/////////////////////////////////////////////////////////////////////////////// +// Stream + +/*! \class rapidjson::Stream + \brief Concept for reading and writing characters. + + For read-only stream, no need to implement PutBegin(), Put(), Flush() and PutEnd(). + + For write-only stream, only need to implement Put() and Flush(). + +\code +concept Stream { + typename Ch; //!< Character type of the stream. + + //! 
Read the current character from stream without moving the read cursor. + Ch Peek() const; + + //! Read the current character from stream and moving the read cursor to next character. + Ch Take(); + + //! Get the current read cursor. + //! \return Number of characters read from start. + size_t Tell(); + + //! Begin writing operation at the current read pointer. + //! \return The begin writer pointer. + Ch* PutBegin(); + + //! Write a character. + void Put(Ch c); + + //! Flush the buffer. + void Flush(); + + //! End the writing operation. + //! \param begin The begin write pointer returned by PutBegin(). + //! \return Number of characters written. + size_t PutEnd(Ch* begin); +} +\endcode +*/ + +//! Provides additional information for stream. +/*! + By using traits pattern, this type provides a default configuration for stream. + For custom stream, this type can be specialized for other configuration. + See TEST(Reader, CustomStringStream) in readertest.cpp for example. +*/ +template +struct StreamTraits { + //! Whether to make local copy of stream for optimization during parsing. + /*! + By default, for safety, streams do not use local copy optimization. + Stream that can be copied fast should specialize this, like StreamTraits. + */ + enum { copyOptimization = 0 }; +}; + +//! Reserve n characters for writing to a stream. +template +inline void PutReserve(Stream& stream, size_t count) { + (void)stream; + (void)count; +} + +//! Write character to a stream, presuming buffer is reserved. +template +inline void PutUnsafe(Stream& stream, typename Stream::Ch c) { + stream.Put(c); +} + +//! Put N copies of a character to a stream. +template +inline void PutN(Stream& stream, Ch c, size_t n) { + PutReserve(stream, n); + for (size_t i = 0; i < n; i++) + PutUnsafe(stream, c); +} + +/////////////////////////////////////////////////////////////////////////////// +// StringStream + +//! Read-only string stream. +/*! \note implements Stream concept +*/ +template +struct GenericStringStream { + typedef typename Encoding::Ch Ch; + + GenericStringStream(const Ch *src) : src_(src), head_(src) {} + + Ch Peek() const { return *src_; } + Ch Take() { return *src_++; } + size_t Tell() const { return static_cast(src_ - head_); } + + Ch* PutBegin() { RAPIDJSON_ASSERT(false); return 0; } + void Put(Ch) { RAPIDJSON_ASSERT(false); } + void Flush() { RAPIDJSON_ASSERT(false); } + size_t PutEnd(Ch*) { RAPIDJSON_ASSERT(false); return 0; } + + const Ch* src_; //!< Current read position. + const Ch* head_; //!< Original head of the string. +}; + +template +struct StreamTraits > { + enum { copyOptimization = 1 }; +}; + +//! String stream with UTF8 encoding. +typedef GenericStringStream > StringStream; + +/////////////////////////////////////////////////////////////////////////////// +// InsituStringStream + +//! A read-write string stream. +/*! This string stream is particularly designed for in-situ parsing. 
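+    During in-situ parsing (see kParseInsituFlag) decoded strings are written back
+    into the buffer that is being read: PutBegin() sets the write pointer to the
+    current read position and Put() writes behind it, so the destination never
+    overtakes the source and no additional buffer is needed.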
+ \note implements Stream concept +*/ +template +struct GenericInsituStringStream { + typedef typename Encoding::Ch Ch; + + GenericInsituStringStream(Ch *src) : src_(src), dst_(0), head_(src) {} + + // Read + Ch Peek() { return *src_; } + Ch Take() { return *src_++; } + size_t Tell() { return static_cast(src_ - head_); } + + // Write + void Put(Ch c) { RAPIDJSON_ASSERT(dst_ != 0); *dst_++ = c; } + + Ch* PutBegin() { return dst_ = src_; } + size_t PutEnd(Ch* begin) { return static_cast(dst_ - begin); } + void Flush() {} + + Ch* Push(size_t count) { Ch* begin = dst_; dst_ += count; return begin; } + void Pop(size_t count) { dst_ -= count; } + + Ch* src_; + Ch* dst_; + Ch* head_; +}; + +template +struct StreamTraits > { + enum { copyOptimization = 1 }; +}; + +//! Insitu string stream with UTF8 encoding. +typedef GenericInsituStringStream > InsituStringStream; + +RAPIDJSON_NAMESPACE_END + +#endif // RAPIDJSON_STREAM_H_ diff --git a/include/rapidjson/stringbuffer.h b/include/rapidjson/stringbuffer.h new file mode 100644 index 0000000..78f34d2 --- /dev/null +++ b/include/rapidjson/stringbuffer.h @@ -0,0 +1,117 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_STRINGBUFFER_H_ +#define RAPIDJSON_STRINGBUFFER_H_ + +#include "stream.h" +#include "internal/stack.h" + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS +#include // std::move +#endif + +#include "internal/stack.h" + +#if defined(__clang__) +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(c++98-compat) +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +//! Represents an in-memory output stream. +/*! + \tparam Encoding Encoding of the stream. + \tparam Allocator type for allocating memory buffer. + \note implements Stream concept +*/ +template +class GenericStringBuffer { +public: + typedef typename Encoding::Ch Ch; + + GenericStringBuffer(Allocator* allocator = 0, size_t capacity = kDefaultCapacity) : stack_(allocator, capacity) {} + +#if RAPIDJSON_HAS_CXX11_RVALUE_REFS + GenericStringBuffer(GenericStringBuffer&& rhs) : stack_(std::move(rhs.stack_)) {} + GenericStringBuffer& operator=(GenericStringBuffer&& rhs) { + if (&rhs != this) + stack_ = std::move(rhs.stack_); + return *this; + } +#endif + + void Put(Ch c) { *stack_.template Push() = c; } + void PutUnsafe(Ch c) { *stack_.template PushUnsafe() = c; } + void Flush() {} + + void Clear() { stack_.Clear(); } + void ShrinkToFit() { + // Push and pop a null terminator. This is safe. + *stack_.template Push() = '\0'; + stack_.ShrinkToFit(); + stack_.template Pop(1); + } + + void Reserve(size_t count) { stack_.template Reserve(count); } + Ch* Push(size_t count) { return stack_.template Push(count); } + Ch* PushUnsafe(size_t count) { return stack_.template PushUnsafe(count); } + void Pop(size_t count) { stack_.template Pop(count); } + + const Ch* GetString() const { + // Push and pop a null terminator. This is safe. 
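+        // Pop() only moves the stack top back; the terminator stays in the already
+        // reserved buffer until the next Put(), and stack_ is declared mutable so
+        // this works from a const accessor.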
+ *stack_.template Push() = '\0'; + stack_.template Pop(1); + + return stack_.template Bottom(); + } + + size_t GetSize() const { return stack_.GetSize(); } + + static const size_t kDefaultCapacity = 256; + mutable internal::Stack stack_; + +private: + // Prohibit copy constructor & assignment operator. + GenericStringBuffer(const GenericStringBuffer&); + GenericStringBuffer& operator=(const GenericStringBuffer&); +}; + +//! String buffer with UTF8 encoding +typedef GenericStringBuffer > StringBuffer; + +template +inline void PutReserve(GenericStringBuffer& stream, size_t count) { + stream.Reserve(count); +} + +template +inline void PutUnsafe(GenericStringBuffer& stream, typename Encoding::Ch c) { + stream.PutUnsafe(c); +} + +//! Implement specialized version of PutN() with memset() for better performance. +template<> +inline void PutN(GenericStringBuffer >& stream, char c, size_t n) { + std::memset(stream.stack_.Push(n), c, n * sizeof(c)); +} + +RAPIDJSON_NAMESPACE_END + +#if defined(__clang__) +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_STRINGBUFFER_H_ diff --git a/include/rapidjson/writer.h b/include/rapidjson/writer.h new file mode 100644 index 0000000..94f22dd --- /dev/null +++ b/include/rapidjson/writer.h @@ -0,0 +1,610 @@ +// Tencent is pleased to support the open source community by making RapidJSON available. +// +// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. +// +// Licensed under the MIT License (the "License"); you may not use this file except +// in compliance with the License. You may obtain a copy of the License at +// +// http://opensource.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software distributed +// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +// CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. + +#ifndef RAPIDJSON_WRITER_H_ +#define RAPIDJSON_WRITER_H_ + +#include "stream.h" +#include "internal/stack.h" +#include "internal/strfunc.h" +#include "internal/dtoa.h" +#include "internal/itoa.h" +#include "stringbuffer.h" +#include // placement new + +#if defined(RAPIDJSON_SIMD) && defined(_MSC_VER) +#include +#pragma intrinsic(_BitScanForward) +#endif +#ifdef RAPIDJSON_SSE42 +#include +#elif defined(RAPIDJSON_SSE2) +#include +#endif + +#ifdef _MSC_VER +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(4127) // conditional expression is constant +#endif + +#ifdef __clang__ +RAPIDJSON_DIAG_PUSH +RAPIDJSON_DIAG_OFF(padded) +RAPIDJSON_DIAG_OFF(unreachable-code) +#endif + +RAPIDJSON_NAMESPACE_BEGIN + +/////////////////////////////////////////////////////////////////////////////// +// WriteFlag + +/*! \def RAPIDJSON_WRITE_DEFAULT_FLAGS + \ingroup RAPIDJSON_CONFIG + \brief User-defined kWriteDefaultFlags definition. + + User can define this as any \c WriteFlag combinations. +*/ +#ifndef RAPIDJSON_WRITE_DEFAULT_FLAGS +#define RAPIDJSON_WRITE_DEFAULT_FLAGS kWriteNoFlags +#endif + +//! Combination of writeFlags +enum WriteFlag { + kWriteNoFlags = 0, //!< No flags are set. + kWriteValidateEncodingFlag = 1, //!< Validate encoding of JSON strings. + kWriteNanAndInfFlag = 2, //!< Allow writing of Infinity, -Infinity and NaN. + kWriteDefaultFlags = RAPIDJSON_WRITE_DEFAULT_FLAGS //!< Default write flags. Can be customized by defining RAPIDJSON_WRITE_DEFAULT_FLAGS +}; + +//! JSON writer +/*! Writer implements the concept Handler. 
+ It generates JSON text by events to an output os. + + User may programmatically calls the functions of a writer to generate JSON text. + + On the other side, a writer can also be passed to objects that generates events, + + for example Reader::Parse() and Document::Accept(). + + \tparam OutputStream Type of output stream. + \tparam SourceEncoding Encoding of source string. + \tparam TargetEncoding Encoding of output stream. + \tparam StackAllocator Type of allocator for allocating memory of stack. + \note implements Handler concept +*/ +template, typename TargetEncoding = UTF8<>, typename StackAllocator = CrtAllocator, unsigned writeFlags = kWriteDefaultFlags> +class Writer { +public: + typedef typename SourceEncoding::Ch Ch; + + static const int kDefaultMaxDecimalPlaces = 324; + + //! Constructor + /*! \param os Output stream. + \param stackAllocator User supplied allocator. If it is null, it will create a private one. + \param levelDepth Initial capacity of stack. + */ + explicit + Writer(OutputStream& os, StackAllocator* stackAllocator = 0, size_t levelDepth = kDefaultLevelDepth) : + os_(&os), level_stack_(stackAllocator, levelDepth * sizeof(Level)), maxDecimalPlaces_(kDefaultMaxDecimalPlaces), hasRoot_(false) {} + + explicit + Writer(StackAllocator* allocator = 0, size_t levelDepth = kDefaultLevelDepth) : + os_(0), level_stack_(allocator, levelDepth * sizeof(Level)), maxDecimalPlaces_(kDefaultMaxDecimalPlaces), hasRoot_(false) {} + + //! Reset the writer with a new stream. + /*! + This function reset the writer with a new stream and default settings, + in order to make a Writer object reusable for output multiple JSONs. + + \param os New output stream. + \code + Writer writer(os1); + writer.StartObject(); + // ... + writer.EndObject(); + + writer.Reset(os2); + writer.StartObject(); + // ... + writer.EndObject(); + \endcode + */ + void Reset(OutputStream& os) { + os_ = &os; + hasRoot_ = false; + level_stack_.Clear(); + } + + //! Checks whether the output is a complete JSON. + /*! + A complete JSON has a complete root object or array. + */ + bool IsComplete() const { + return hasRoot_ && level_stack_.Empty(); + } + + int GetMaxDecimalPlaces() const { + return maxDecimalPlaces_; + } + + //! Sets the maximum number of decimal places for double output. + /*! + This setting truncates the output with specified number of decimal places. + + For example, + + \code + writer.SetMaxDecimalPlaces(3); + writer.StartArray(); + writer.Double(0.12345); // "0.123" + writer.Double(0.0001); // "0.0" + writer.Double(1.234567890123456e30); // "1.234567890123456e30" (do not truncate significand for positive exponent) + writer.Double(1.23e-4); // "0.0" (do truncate significand for negative exponent) + writer.EndArray(); + \endcode + + The default setting does not truncate any decimal places. You can restore to this setting by calling + \code + writer.SetMaxDecimalPlaces(Writer::kDefaultMaxDecimalPlaces); + \endcode + */ + void SetMaxDecimalPlaces(int maxDecimalPlaces) { + maxDecimalPlaces_ = maxDecimalPlaces; + } + + /*!@name Implementation of Handler + \see Handler + */ + //@{ + + bool Null() { Prefix(kNullType); return EndValue(WriteNull()); } + bool Bool(bool b) { Prefix(b ? 
kTrueType : kFalseType); return EndValue(WriteBool(b)); } + bool Int(int i) { Prefix(kNumberType); return EndValue(WriteInt(i)); } + bool Uint(unsigned u) { Prefix(kNumberType); return EndValue(WriteUint(u)); } + bool Int64(int64_t i64) { Prefix(kNumberType); return EndValue(WriteInt64(i64)); } + bool Uint64(uint64_t u64) { Prefix(kNumberType); return EndValue(WriteUint64(u64)); } + + //! Writes the given \c double value to the stream + /*! + \param d The value to be written. + \return Whether it is succeed. + */ + bool Double(double d) { Prefix(kNumberType); return EndValue(WriteDouble(d)); } + + bool RawNumber(const Ch* str, SizeType length, bool copy = false) { + (void)copy; + Prefix(kNumberType); + return EndValue(WriteString(str, length)); + } + + bool String(const Ch* str, SizeType length, bool copy = false) { + (void)copy; + Prefix(kStringType); + return EndValue(WriteString(str, length)); + } + +#if RAPIDJSON_HAS_STDSTRING + bool String(const std::basic_string& str) { + return String(str.data(), SizeType(str.size())); + } +#endif + + bool StartObject() { + Prefix(kObjectType); + new (level_stack_.template Push()) Level(false); + return WriteStartObject(); + } + + bool Key(const Ch* str, SizeType length, bool copy = false) { return String(str, length, copy); } + + bool EndObject(SizeType memberCount = 0) { + (void)memberCount; + RAPIDJSON_ASSERT(level_stack_.GetSize() >= sizeof(Level)); + RAPIDJSON_ASSERT(!level_stack_.template Top()->inArray); + level_stack_.template Pop(1); + return EndValue(WriteEndObject()); + } + + bool StartArray() { + Prefix(kArrayType); + new (level_stack_.template Push()) Level(true); + return WriteStartArray(); + } + + bool EndArray(SizeType elementCount = 0) { + (void)elementCount; + RAPIDJSON_ASSERT(level_stack_.GetSize() >= sizeof(Level)); + RAPIDJSON_ASSERT(level_stack_.template Top()->inArray); + level_stack_.template Pop(1); + return EndValue(WriteEndArray()); + } + //@} + + /*! @name Convenience extensions */ + //@{ + + //! Simpler but slower overload. + bool String(const Ch* str) { return String(str, internal::StrLen(str)); } + bool Key(const Ch* str) { return Key(str, internal::StrLen(str)); } + + //@} + + //! Write a raw JSON value. + /*! + For user to write a stringified JSON as a value. + + \param json A well-formed JSON value. It should not contain null character within [0, length - 1] range. + \param length Length of the json. + \param type Type of the root of json. + */ + bool RawValue(const Ch* json, size_t length, Type type) { Prefix(type); return EndValue(WriteRawValue(json, length)); } + +protected: + //! 
Information for each nested level + struct Level { + Level(bool inArray_) : valueCount(0), inArray(inArray_) {} + size_t valueCount; //!< number of values in this level + bool inArray; //!< true if in array, otherwise in object + }; + + static const size_t kDefaultLevelDepth = 32; + + bool WriteNull() { + PutReserve(*os_, 4); + PutUnsafe(*os_, 'n'); PutUnsafe(*os_, 'u'); PutUnsafe(*os_, 'l'); PutUnsafe(*os_, 'l'); return true; + } + + bool WriteBool(bool b) { + if (b) { + PutReserve(*os_, 4); + PutUnsafe(*os_, 't'); PutUnsafe(*os_, 'r'); PutUnsafe(*os_, 'u'); PutUnsafe(*os_, 'e'); + } + else { + PutReserve(*os_, 5); + PutUnsafe(*os_, 'f'); PutUnsafe(*os_, 'a'); PutUnsafe(*os_, 'l'); PutUnsafe(*os_, 's'); PutUnsafe(*os_, 'e'); + } + return true; + } + + bool WriteInt(int i) { + char buffer[11]; + const char* end = internal::i32toa(i, buffer); + PutReserve(*os_, static_cast(end - buffer)); + for (const char* p = buffer; p != end; ++p) + PutUnsafe(*os_, static_cast(*p)); + return true; + } + + bool WriteUint(unsigned u) { + char buffer[10]; + const char* end = internal::u32toa(u, buffer); + PutReserve(*os_, static_cast(end - buffer)); + for (const char* p = buffer; p != end; ++p) + PutUnsafe(*os_, static_cast(*p)); + return true; + } + + bool WriteInt64(int64_t i64) { + char buffer[21]; + const char* end = internal::i64toa(i64, buffer); + PutReserve(*os_, static_cast(end - buffer)); + for (const char* p = buffer; p != end; ++p) + PutUnsafe(*os_, static_cast(*p)); + return true; + } + + bool WriteUint64(uint64_t u64) { + char buffer[20]; + char* end = internal::u64toa(u64, buffer); + PutReserve(*os_, static_cast(end - buffer)); + for (char* p = buffer; p != end; ++p) + PutUnsafe(*os_, static_cast(*p)); + return true; + } + + bool WriteDouble(double d) { + if (internal::Double(d).IsNanOrInf()) { + if (!(writeFlags & kWriteNanAndInfFlag)) + return false; + if (internal::Double(d).IsNan()) { + PutReserve(*os_, 3); + PutUnsafe(*os_, 'N'); PutUnsafe(*os_, 'a'); PutUnsafe(*os_, 'N'); + return true; + } + if (internal::Double(d).Sign()) { + PutReserve(*os_, 9); + PutUnsafe(*os_, '-'); + } + else + PutReserve(*os_, 8); + PutUnsafe(*os_, 'I'); PutUnsafe(*os_, 'n'); PutUnsafe(*os_, 'f'); + PutUnsafe(*os_, 'i'); PutUnsafe(*os_, 'n'); PutUnsafe(*os_, 'i'); PutUnsafe(*os_, 't'); PutUnsafe(*os_, 'y'); + return true; + } + + char buffer[25]; + char* end = internal::dtoa(d, buffer, maxDecimalPlaces_); + PutReserve(*os_, static_cast(end - buffer)); + for (char* p = buffer; p != end; ++p) + PutUnsafe(*os_, static_cast(*p)); + return true; + } + + bool WriteString(const Ch* str, SizeType length) { + static const typename TargetEncoding::Ch hexDigits[16] = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' }; + static const char escape[256] = { +#define Z16 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 + //0 1 2 3 4 5 6 7 8 9 A B C D E F + 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'b', 't', 'n', 'u', 'f', 'r', 'u', 'u', // 00 + 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', // 10 + 0, 0, '"', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 20 + Z16, Z16, // 30~4F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,'\\', 0, 0, 0, // 50 + Z16, Z16, Z16, Z16, Z16, Z16, Z16, Z16, Z16, Z16 // 60~FF +#undef Z16 + }; + + if (TargetEncoding::supportUnicode) + PutReserve(*os_, 2 + length * 6); // "\uxxxx..." + else + PutReserve(*os_, 2 + length * 12); // "\uxxxx\uyyyy..." 
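+        // The reservation above covers the worst case where every character is
+        // escaped: a "\uxxxx" escape takes 6 output characters, a surrogate-pair
+        // escape "\uxxxx\uyyyy" (used when the target encoding cannot hold the
+        // code point directly) takes 12; the extra 2 covers the enclosing quotes.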
+ + PutUnsafe(*os_, '\"'); + GenericStringStream is(str); + while (ScanWriteUnescapedString(is, length)) { + const Ch c = is.Peek(); + if (!TargetEncoding::supportUnicode && static_cast(c) >= 0x80) { + // Unicode escaping + unsigned codepoint; + if (RAPIDJSON_UNLIKELY(!SourceEncoding::Decode(is, &codepoint))) + return false; + PutUnsafe(*os_, '\\'); + PutUnsafe(*os_, 'u'); + if (codepoint <= 0xD7FF || (codepoint >= 0xE000 && codepoint <= 0xFFFF)) { + PutUnsafe(*os_, hexDigits[(codepoint >> 12) & 15]); + PutUnsafe(*os_, hexDigits[(codepoint >> 8) & 15]); + PutUnsafe(*os_, hexDigits[(codepoint >> 4) & 15]); + PutUnsafe(*os_, hexDigits[(codepoint ) & 15]); + } + else { + RAPIDJSON_ASSERT(codepoint >= 0x010000 && codepoint <= 0x10FFFF); + // Surrogate pair + unsigned s = codepoint - 0x010000; + unsigned lead = (s >> 10) + 0xD800; + unsigned trail = (s & 0x3FF) + 0xDC00; + PutUnsafe(*os_, hexDigits[(lead >> 12) & 15]); + PutUnsafe(*os_, hexDigits[(lead >> 8) & 15]); + PutUnsafe(*os_, hexDigits[(lead >> 4) & 15]); + PutUnsafe(*os_, hexDigits[(lead ) & 15]); + PutUnsafe(*os_, '\\'); + PutUnsafe(*os_, 'u'); + PutUnsafe(*os_, hexDigits[(trail >> 12) & 15]); + PutUnsafe(*os_, hexDigits[(trail >> 8) & 15]); + PutUnsafe(*os_, hexDigits[(trail >> 4) & 15]); + PutUnsafe(*os_, hexDigits[(trail ) & 15]); + } + } + else if ((sizeof(Ch) == 1 || static_cast(c) < 256) && RAPIDJSON_UNLIKELY(escape[static_cast(c)])) { + is.Take(); + PutUnsafe(*os_, '\\'); + PutUnsafe(*os_, static_cast(escape[static_cast(c)])); + if (escape[static_cast(c)] == 'u') { + PutUnsafe(*os_, '0'); + PutUnsafe(*os_, '0'); + PutUnsafe(*os_, hexDigits[static_cast(c) >> 4]); + PutUnsafe(*os_, hexDigits[static_cast(c) & 0xF]); + } + } + else if (RAPIDJSON_UNLIKELY(!(writeFlags & kWriteValidateEncodingFlag ? + Transcoder::Validate(is, *os_) : + Transcoder::TranscodeUnsafe(is, *os_)))) + return false; + } + PutUnsafe(*os_, '\"'); + return true; + } + + bool ScanWriteUnescapedString(GenericStringStream& is, size_t length) { + return RAPIDJSON_LIKELY(is.Tell() < length); + } + + bool WriteStartObject() { os_->Put('{'); return true; } + bool WriteEndObject() { os_->Put('}'); return true; } + bool WriteStartArray() { os_->Put('['); return true; } + bool WriteEndArray() { os_->Put(']'); return true; } + + bool WriteRawValue(const Ch* json, size_t length) { + PutReserve(*os_, length); + for (size_t i = 0; i < length; i++) { + RAPIDJSON_ASSERT(json[i] != '\0'); + PutUnsafe(*os_, json[i]); + } + return true; + } + + void Prefix(Type type) { + (void)type; + if (RAPIDJSON_LIKELY(level_stack_.GetSize() != 0)) { // this value is not at root + Level* level = level_stack_.template Top(); + if (level->valueCount > 0) { + if (level->inArray) + os_->Put(','); // add comma if it is not the first element in array + else // in object + os_->Put((level->valueCount % 2 == 0) ? ',' : ':'); + } + if (!level->inArray && level->valueCount % 2 == 0) + RAPIDJSON_ASSERT(type == kStringType); // if it's in object, then even number should be a name + level->valueCount++; + } + else { + RAPIDJSON_ASSERT(!hasRoot_); // Should only has one and only one root. + hasRoot_ = true; + } + } + + // Flush the value if it is the top level one. + bool EndValue(bool ret) { + if (RAPIDJSON_UNLIKELY(level_stack_.Empty())) // end of json text + os_->Flush(); + return ret; + } + + OutputStream* os_; + internal::Stack level_stack_; + int maxDecimalPlaces_; + bool hasRoot_; + +private: + // Prohibit copy constructor & assignment operator. 
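+    // They are declared private and never defined (the pre-C++11 non-copyable
+    // idiom), so any attempt to copy a Writer fails at compile or link time.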
+ Writer(const Writer&); + Writer& operator=(const Writer&); +}; + +// Full specialization for StringStream to prevent memory copying + +template<> +inline bool Writer::WriteInt(int i) { + char *buffer = os_->Push(11); + const char* end = internal::i32toa(i, buffer); + os_->Pop(static_cast(11 - (end - buffer))); + return true; +} + +template<> +inline bool Writer::WriteUint(unsigned u) { + char *buffer = os_->Push(10); + const char* end = internal::u32toa(u, buffer); + os_->Pop(static_cast(10 - (end - buffer))); + return true; +} + +template<> +inline bool Writer::WriteInt64(int64_t i64) { + char *buffer = os_->Push(21); + const char* end = internal::i64toa(i64, buffer); + os_->Pop(static_cast(21 - (end - buffer))); + return true; +} + +template<> +inline bool Writer::WriteUint64(uint64_t u) { + char *buffer = os_->Push(20); + const char* end = internal::u64toa(u, buffer); + os_->Pop(static_cast(20 - (end - buffer))); + return true; +} + +template<> +inline bool Writer::WriteDouble(double d) { + if (internal::Double(d).IsNanOrInf()) { + // Note: This code path can only be reached if (RAPIDJSON_WRITE_DEFAULT_FLAGS & kWriteNanAndInfFlag). + if (!(kWriteDefaultFlags & kWriteNanAndInfFlag)) + return false; + if (internal::Double(d).IsNan()) { + PutReserve(*os_, 3); + PutUnsafe(*os_, 'N'); PutUnsafe(*os_, 'a'); PutUnsafe(*os_, 'N'); + return true; + } + if (internal::Double(d).Sign()) { + PutReserve(*os_, 9); + PutUnsafe(*os_, '-'); + } + else + PutReserve(*os_, 8); + PutUnsafe(*os_, 'I'); PutUnsafe(*os_, 'n'); PutUnsafe(*os_, 'f'); + PutUnsafe(*os_, 'i'); PutUnsafe(*os_, 'n'); PutUnsafe(*os_, 'i'); PutUnsafe(*os_, 't'); PutUnsafe(*os_, 'y'); + return true; + } + + char *buffer = os_->Push(25); + char* end = internal::dtoa(d, buffer, maxDecimalPlaces_); + os_->Pop(static_cast(25 - (end - buffer))); + return true; +} + +#if defined(RAPIDJSON_SSE2) || defined(RAPIDJSON_SSE42) +template<> +inline bool Writer::ScanWriteUnescapedString(StringStream& is, size_t length) { + if (length < 16) + return RAPIDJSON_LIKELY(is.Tell() < length); + + if (!RAPIDJSON_LIKELY(is.Tell() < length)) + return false; + + const char* p = is.src_; + const char* end = is.head_ + length; + const char* nextAligned = reinterpret_cast((reinterpret_cast(p) + 15) & static_cast(~15)); + const char* endAligned = reinterpret_cast(reinterpret_cast(end) & static_cast(~15)); + if (nextAligned > end) + return true; + + while (p != nextAligned) + if (*p < 0x20 || *p == '\"' || *p == '\\') { + is.src_ = p; + return RAPIDJSON_LIKELY(is.Tell() < length); + } + else + os_->PutUnsafe(*p++); + + // The rest of string using SIMD + static const char dquote[16] = { '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"' }; + static const char bslash[16] = { '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\' }; + static const char space[16] = { 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19 }; + const __m128i dq = _mm_loadu_si128(reinterpret_cast(&dquote[0])); + const __m128i bs = _mm_loadu_si128(reinterpret_cast(&bslash[0])); + const __m128i sp = _mm_loadu_si128(reinterpret_cast(&space[0])); + + for (; p != endAligned; p += 16) { + const __m128i s = _mm_load_si128(reinterpret_cast(p)); + const __m128i t1 = _mm_cmpeq_epi8(s, dq); + const __m128i t2 = _mm_cmpeq_epi8(s, bs); + const __m128i t3 = _mm_cmpeq_epi8(_mm_max_epu8(s, sp), sp); // s < 0x20 <=> max(s, 0x19) == 0x19 + const __m128i x = 
_mm_or_si128(_mm_or_si128(t1, t2), t3); + unsigned short r = static_cast(_mm_movemask_epi8(x)); + if (RAPIDJSON_UNLIKELY(r != 0)) { // some of characters is escaped + SizeType len; +#ifdef _MSC_VER // Find the index of first escaped + unsigned long offset; + _BitScanForward(&offset, r); + len = offset; +#else + len = static_cast(__builtin_ffs(r) - 1); +#endif + char* q = reinterpret_cast(os_->PushUnsafe(len)); + for (size_t i = 0; i < len; i++) + q[i] = p[i]; + + p += len; + break; + } + _mm_storeu_si128(reinterpret_cast<__m128i *>(os_->PushUnsafe(16)), s); + } + + is.src_ = p; + return RAPIDJSON_LIKELY(is.Tell() < length); +} +#endif // defined(RAPIDJSON_SSE2) || defined(RAPIDJSON_SSE42) + +RAPIDJSON_NAMESPACE_END + +#ifdef _MSC_VER +RAPIDJSON_DIAG_POP +#endif + +#ifdef __clang__ +RAPIDJSON_DIAG_POP +#endif + +#endif // RAPIDJSON_RAPIDJSON_H_ diff --git a/include/seiscomp/broker/client.h b/include/seiscomp/broker/client.h new file mode 100644 index 0000000..90a30d0 --- /dev/null +++ b/include/seiscomp/broker/client.h @@ -0,0 +1,219 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * Author: Jan Becker * + * Email: jabe@gempa.de * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef GEMPA_BROKER_CLIENT_H__ +#define GEMPA_BROKER_CLIENT_H__ + + +#include +#include + +#include + + +namespace Seiscomp { +namespace Messaging { +namespace Broker { + + +class Message; +class Group; +class Queue; + + +/** + * @brief The Client interface describes a client connected to a queue acting + * as message subscriber. + * + * The only thing that a client does is to store a name which is being assigned + * by the queue and publish messages. The publish method is abstract and needs + * to be implemented by derived classes such as communication protocols. + */ +class SC_BROKER_API Client { + // ---------------------------------------------------------------------- + // Public types and enumerations + // ---------------------------------------------------------------------- + public: + enum Constants { + MaxLocalHeapSize = 128 + }; + + + // ---------------------------------------------------------------------- + // X'truction + // ---------------------------------------------------------------------- + public: + //! C'tor + Client(); + + //! 
D'tor + virtual ~Client(); + + + // ---------------------------------------------------------------------- + // Public interface + // ---------------------------------------------------------------------- + public: + const std::string &name() const { return _name; } + + /** + * @brief Returns the absolute memory pointer of an local heap + * offset + * @param offset The offset in bytes + * @return The pointer of the memory block + */ + void *memory(int offset); + const void *memory(int offset) const; + + /** + * @return The time in UTC when the client has connected. + */ + const Core::Time &created() const; + + + // ---------------------------------------------------------------------- + // Subscriber interface + // ---------------------------------------------------------------------- + public: + bool setMembershipInformationEnabled(bool enable); + bool wantsMembershipInformation() const; + + /** + * @brief Sets whether to discard messages where receiver equals + * the sender or not. + * @param enable The enable flag + * @return Success flag + */ + bool setDiscardSelf(bool enable); + bool discardSelf() const; + + /** + * @brief Sets the number of messages required to send back an + * acknoledgement. + * @param numberOfMessages The window size + */ + void setAcknowledgeWindow(SequenceNumber numberOfMessages); + + /** + * @brief Returns the IP address connected to the client socket. + * If the underlying transport does not implement IP socket + * communication, it can return 0. + * @return The IP address + */ + virtual Wired::Socket::IPAddress IPAddress() const = 0; + + /** + * @brief Publishes a message + * + * This method has to be implemented by all subclasses to encode it + * into their transport format and to send it. + * + * @param sender The sender of the message + * @param msg The message + * @return Number of bytes sent + */ + virtual size_t publish(Client *sender, Message *msg) = 0; + + /** + * @brief Notifies a client that a new member entered a group the client + * is also member in. + * @param group The group the new member entered. + * @param newMember The client pointer to the new member. + * @param msg The message to be sent. + */ + virtual void enter(const Group *group, const Client *newMember, Message *msg) = 0; + + /** + * @brief Notifies a client that a member left a group the client + * is also member in. + * @param group The group the member left. + * @param oldMember The client pointer to the member. + * @param msg The message to be sent. + */ + virtual void leave(const Group *group, const Client *oldMember, Message *msg) = 0; + + virtual void disconnected(const Client *disconnectedClient, Message *msg) = 0; + + //! Send acknowledgment to sender + virtual void ack() = 0; + + //! 
Remove the clients resources + virtual void dispose() = 0; + + + // ---------------------------------------------------------------------- + // Protected members + // ---------------------------------------------------------------------- + protected: + Queue *_queue; + Core::Time _created; + Core::Time _lastSOHReceived; + std::string _name; + bool _wantsMembershipInformation; + bool _discardSelf; + SequenceNumber _sequenceNumber; + SequenceNumber _acknowledgeWindow; + SequenceNumber _acknowledgeCounter; + Core::Time _ackInitiated; + int _inactivityCounter; // The number of seconds + // of inactivity + + + // ---------------------------------------------------------------------- + // Private members + // ---------------------------------------------------------------------- + private: + // Local client heap to additional user data stored by e.g. plugins + char _heap[MaxLocalHeapSize]; + + friend class Queue; +}; + + +inline bool Client::wantsMembershipInformation() const { + return _wantsMembershipInformation; +} + +inline bool Client::discardSelf() const { + return _discardSelf; +} + +inline void *Client::memory(int offset) { + return _heap + offset; +} + +inline const void *Client::memory(int offset) const { + return _heap + offset; +} + +inline const Core::Time &Client::created() const { + return _created; +} + + +} +} +} + + +#endif diff --git a/include/seiscomp/broker/group.h b/include/seiscomp/broker/group.h new file mode 100644 index 0000000..aad9aad --- /dev/null +++ b/include/seiscomp/broker/group.h @@ -0,0 +1,137 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * Author: Jan Becker * + * Email: jabe@gempa.de * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef GEMPA_BROKER_GROUP_H__ +#define GEMPA_BROKER_GROUP_H__ + + +#include +#include + +#include +#include + + +namespace Seiscomp { +namespace Messaging { +namespace Broker { + + +class Client; +class Queue; + + +DEFINE_SMARTPOINTER(Group); + +/** + * @brief The Group class implements a particular group (or channel/topic) of + * a queue. + * + * Each group can have members. A member is a client. This class is nothing + * else than a manager of members in an efficient way. It implements a very + * fast hashset (KHash) of its members which makes member tests very fast and + * does not make it necessary to manage additional complicated lookup + * structures. 
+ */ +class SC_BROKER_API Group : public Core::BaseObject { + // ---------------------------------------------------------------------- + // Public types + // ---------------------------------------------------------------------- + public: + typedef KHashSet Members; + + + // ---------------------------------------------------------------------- + // X'truction + // ---------------------------------------------------------------------- + public: + //! C'tor + explicit Group(const char *name); + + //! D'tor + ~Group(); + + + // ---------------------------------------------------------------------- + // Public interface + // ---------------------------------------------------------------------- + public: + //! Returns the name of the group. + const std::string &name() const; + + //! Returns the number of members. + size_t memberCount() const; + + /** + * @brief Adds a member to the group if it is not yet. + * @param client The pointer identifying a unique client + * @return true on success, false otherwise e.g. duplicates + */ + bool addMember(Client *client); + + /** + * @brief Removes a member from the group. + * @param client The pointer identifying a unique client + * @return true on success, false otherwise e.g. does not exist + */ + bool removeMember(Client *client); + + //! Returns if a client is a member of not. + bool hasMember(const Client *client) const; + + //! Removes all members. + void clearMembers() { _members.clear(); } + + const Members &members() const; + + + // ---------------------------------------------------------------------- + // Private members + // ---------------------------------------------------------------------- + private: + std::string _name; + Members _members; + mutable Tx _txMessages; + mutable Tx _txBytes; + mutable Tx _txPayload; + + + friend class Queue; +}; + + +inline const std::string &Group::name() const { + return _name; +} + +inline const Group::Members &Group::members() const { + return _members; +} + + +} +} +} + + +#endif diff --git a/include/seiscomp/broker/hashset.h b/include/seiscomp/broker/hashset.h new file mode 100644 index 0000000..2fdfcfe --- /dev/null +++ b/include/seiscomp/broker/hashset.h @@ -0,0 +1,666 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * Author: Jan Becker * + * Email: jabe@gempa.de * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. 
* + ***************************************************************************/ + + +#ifndef GEMPA_BROKER_HASHSET_H +#define GEMPA_BROKER_HASHSET_H + + +#include +#include +#include + +#include +#include + + +namespace Seiscomp { + + +KHASH_SET_INIT_INT(int) +KHASH_SET_INIT_INT64(int64) +KHASH_SET_INIT_STR(str) +KHASH_MAP_INIT_STR(m_str, void*) + + +template +class KHashSet {}; + +template +class KHashMap {}; + + +template <> +class KHashSet { + public: + struct iterator { + iterator() {} + iterator(const iterator &other) : h(other.h), k(other.k) {} + iterator(khash_t(int) *h_, unsigned k_) : h(h_), k(k_) {} + + bool operator==(const iterator &other) const { + return k == other.k; + } + + bool operator!=(const iterator &other) const { + return k != other.k; + } + + // Prefix + iterator &operator++() { + ++k; + while ( k != kh_end(h) && !kh_exist(h, k) ) + ++k; + + return *this; + } + + // Postfix + iterator operator++(int) { + iterator tmp(*this); + ++(*this); + return tmp; + } + + uint32_t operator*() const { + return kh_key(h, k); + } + + khash_t(int) *h; + unsigned k; + }; + + + KHashSet() { + _h = kh_init(int); + } + + ~KHashSet() { + kh_destroy(int, _h); + } + + + public: + iterator begin() const { + unsigned k = kh_begin(_h); + while ( k != kh_end(_h) && !kh_exist(_h, k) ) + ++k; + return iterator(_h, k); + } + + iterator end() const { + return iterator(_h, kh_end(_h)); + } + + size_t size() const { + return kh_size(_h); + } + + void clear() { + kh_clear(int, _h); + } + + int insert(uint32_t v) { + int ret; + kh_put(int, _h, v, &ret); + return ret; + } + + iterator find(uint32_t v) const { + return iterator(_h, kh_get(int, _h, v)); + } + + void erase(iterator it) { + kh_del(int, _h, it.k); + } + + bool contains(uint32_t v) const { + return find(v) != end(); + } + + + private: + khash_t(int) *_h; +}; + + +template <> +class KHashSet { + public: + struct iterator { + iterator() {} + iterator(const iterator &other) : h(other.h), k(other.k) {} + iterator(khash_t(int64) *h_, unsigned k_) : h(h_), k(k_) {} + + bool operator==(const iterator &other) const { + return k == other.k; + } + + bool operator!=(const iterator &other) const { + return k != other.k; + } + + // Prefix + iterator &operator++() { + ++k; + while ( k != kh_end(h) && !kh_exist(h, k) ) + ++k; + + return *this; + } + + // Postfix + iterator operator++(int) { + iterator tmp(*this); + ++(*this); + return tmp; + } + + uint64_t operator*() const { + return kh_key(h, k); + } + + khash_t(int64) *h; + unsigned k; + }; + + + KHashSet() { + _h = kh_init(int64); + } + + ~KHashSet() { + kh_destroy(int64, _h); + } + + + public: + iterator begin() const { + unsigned k = kh_begin(_h); + while ( k != kh_end(_h) && !kh_exist(_h, k) ) + ++k; + return iterator(_h, k); + } + + iterator end() const { + return iterator(_h, kh_end(_h)); + } + + size_t size() const { + return kh_size(_h); + } + + void clear() { + kh_clear(int64, _h); + } + + int insert(uint64_t v) { + int ret; + kh_put(int64, _h, v, &ret); + return ret; + } + + iterator find(uint64_t v) const { + return iterator(_h, kh_get(int64, _h, v)); + } + + void erase(iterator it) { + kh_del(int64, _h, it.k); + } + + bool contains(uint64_t v) const { + return find(v) != end(); + } + + + private: + khash_t(int64) *_h; +}; + + +template <> +class KHashSet { + public: + struct iterator { + iterator() {} + iterator(const iterator &other) : h(other.h), k(other.k) {} + iterator(khash_t(str) *h_, unsigned k_) : h(h_), k(k_) {} + + bool operator==(const iterator &other) const { + return k == 
other.k; + } + + bool operator!=(const iterator &other) const { + return k != other.k; + } + + // Prefix + iterator &operator++() { + ++k; + while ( k != kh_end(h) && !kh_exist(h, k) ) + ++k; + + return *this; + } + + // Postfix + iterator operator++(int) { + iterator tmp(*this); + ++(*this); + return tmp; + } + + const char *operator*() const { + return kh_key(h, k); + } + + khash_t(str) *h; + unsigned k; + }; + + + KHashSet() { + _h = kh_init(str); + } + + ~KHashSet() { + kh_destroy(str, _h); + } + + + public: + iterator begin() const { + unsigned k = kh_begin(_h); + while ( k != kh_end(_h) && !kh_exist(_h, k) ) + ++k; + return iterator(_h, k); + } + + iterator end() const { + return iterator(_h, kh_end(_h)); + } + + size_t size() const { + return kh_size(_h); + } + + void clear() { + kh_clear(str, _h); + } + + int insert(const char *v) { + int ret; + kh_put(str, _h, v, &ret); + return ret; + } + + iterator find(const char *str) const { + return iterator(_h, kh_get(str, _h, str)); + } + + iterator find(const std::string &str) const { + return iterator(_h, kh_get(str, _h, str.c_str())); + } + + void erase(iterator it) { + kh_del(str, _h, it.k); + } + + bool contains(const char *str) const { + return find(str) != end(); + } + + bool contains(const std::string &str) const { + return find(str) != end(); + } + + + private: + khash_t(str) *_h; +}; + + +template +class KHashMap { + public: + struct iterator { + iterator() {} + iterator(const iterator &other) : h(other.h), k(other.k) {} + iterator(khash_t(m_str) *h_, unsigned k_) : h(h_), k(k_) {} + + bool operator==(const iterator &other) const { + return k == other.k; + } + + bool operator!=(const iterator &other) const { + return k != other.k; + } + + // Prefix + iterator &operator++() { + ++k; + while ( k != kh_end(h) && !kh_exist(h, k) ) + ++k; + + return *this; + } + + // Postfix + iterator operator++(int) { + iterator tmp(*this); + ++(*this); + return tmp; + } + + const char *operator*() const { + return kh_key(h, k); + } + + const char *key() const { + return kh_key(h, k); + } + + V *value() const { + return (V*)kh_value(h, k); + } + + khash_t(m_str) *h; + unsigned k; + }; + + + KHashMap() { + _h = kh_init(m_str); + } + + ~KHashMap() { + kh_destroy(m_str, _h); + } + + + public: + iterator begin() const { + unsigned k = kh_begin(_h); + while ( k != kh_end(_h) && !kh_exist(_h, k) ) + ++k; + return iterator(_h, k); + } + + iterator end() const { + return iterator(_h, kh_end(_h)); + } + + size_t size() const { + return kh_size(_h); + } + + void clear() { + kh_clear(m_str, _h); + } + + int insert(const char *v, V *value) { + int ret; + khiter_t k = kh_put(m_str, _h, v, &ret); + if ( k >= 0 ) + kh_value(_h, k) = value; + return ret; + } + + iterator find(const char *str) const { + return iterator(_h, kh_get(m_str, _h, str)); + } + + iterator find(const std::string &str) const { + return iterator(_h, kh_get(m_str, _h, str.c_str())); + } + + void erase(iterator it) { + kh_del(m_str, _h, it.k); + } + + bool contains(const char *str) const { + return find(str) != end(); + } + + bool contains(const std::string &str) const { + return find(str) != end(); + } + + + private: + khash_t(m_str) *_h; +}; + + + +template +class KHashSetPtrBase {}; + + +template +class KHashSetPtrBase { + public: + struct iterator { + iterator() {} + iterator(const iterator &other) : h(other.h), k(other.k) {} + iterator(khash_t(int) *h_, unsigned k_) : h(h_), k(k_) {} + + bool operator==(const iterator &other) const { + return k == other.k; + } + + bool operator!=(const 
iterator &other) const { + return k != other.k; + } + + // Prefix + iterator &operator++() { + ++k; + while ( k != kh_end(h) && !kh_exist(h, k) ) + ++k; + + return *this; + } + + // Postfix + iterator operator++(int) { + iterator tmp(*this); + ++(*this); + return tmp; + } + + T operator*() const { + return (T)kh_key(h, k); + } + + khash_t(int) *h; + unsigned k; + }; + + + public: + KHashSetPtrBase() { + _h = kh_init(int); + } + + ~KHashSetPtrBase() { + kh_destroy(int, _h); + } + + + public: + iterator begin() const { + unsigned k = kh_begin(_h); + while ( k != kh_end(_h) && !kh_exist(_h, k) ) + ++k; + return iterator(_h, k); + } + + iterator end() const { + return iterator(_h, kh_end(_h)); + } + + size_t size() const { + return kh_size(_h); + } + + void clear() { + kh_clear(int, _h); + } + + int insert(T v) { + int ret; + kh_put(int, _h, (uintptr_t)v, &ret); + return ret; + } + + iterator find(const void *v) const { + return iterator(_h, kh_get(int, _h, (uintptr_t)v)); + } + + void erase(iterator it) { + kh_del(int, _h, it.k); + } + + bool contains(const void *v) const { + return kh_get(int, _h, (uintptr_t)v) != kh_end(_h); + } + + + private: + khash_t(int) *_h; +}; + + +template +class KHashSetPtrBase { + public: + struct iterator { + iterator() {} + iterator(const iterator &other) : h(other.h), k(other.k) {} + iterator(khash_t(int64) *h_, unsigned k_) : h(h_), k(k_) {} + + bool operator==(const iterator &other) const { + return k == other.k; + } + + bool operator!=(const iterator &other) const { + return k != other.k; + } + + // Prefix + iterator &operator++() { + ++k; + while ( k != kh_end(h) && !kh_exist(h, k) ) + ++k; + + return *this; + } + + // Postfix + iterator operator++(int) { + iterator tmp(*this); + ++(*this); + return tmp; + } + + T operator*() const { + return (T)kh_key(h, k); + } + + khash_t(int64) *h; + unsigned k; + }; + + + public: + KHashSetPtrBase() { + _h = kh_init(int64); + } + + ~KHashSetPtrBase() { + kh_destroy(int64, _h); + } + + + public: + iterator begin() const { + unsigned k = kh_begin(_h); + while ( k != kh_end(_h) && !kh_exist(_h, k) ) + ++k; + return iterator(_h, k); + } + + iterator end() const { + return iterator(_h, kh_end(_h)); + } + + size_t size() const { + return kh_size(_h); + } + + void clear() { + kh_clear(int64, _h); + } + + int insert(T v) { + int ret; + kh_put(int64, _h, (uintptr_t)v, &ret); + return ret; + } + + iterator find(const void *v) const { + return iterator(_h, kh_get(int64, _h, (uintptr_t)v)); + } + + void erase(iterator it) { + kh_del(int64, _h, it.k); + } + + bool contains(const void *v) const { + return kh_get(int64, _h, (uintptr_t)v) != kh_end(_h); + } + + + private: + khash_t(int64) *_h; +}; + + +struct Arch { + enum { + PtrSize = sizeof(intptr_t) + }; +}; + + +template +class KHashSet : public KHashSetPtrBase { + public: + KHashSet() {} +}; + + +} + + +#endif diff --git a/include/seiscomp/broker/message.h b/include/seiscomp/broker/message.h new file mode 100644 index 0000000..6ddb80a --- /dev/null +++ b/include/seiscomp/broker/message.h @@ -0,0 +1,164 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. 
* + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * Author: Jan Becker * + * Email: jabe@gempa.de * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef GEMPA_BROKER_MESSAGE_H__ +#define GEMPA_BROKER_MESSAGE_H__ + + +#include +#include + +#include +#include + +#include + + +namespace Seiscomp { +namespace Messaging { +namespace Broker { + + +class Group; + + +typedef uint64_t SequenceNumber; +#define INVALID_SEQUENCE_NUMBER Seiscomp::Messaging::Broker::SequenceNumber(-1) + + +MAKEENUM( + ContentEncoding, + EVALUES( + Identity, + Deflate, + GZip, + LZ4 + ), + ENAMES( + "identity", + "deflate", + "gzip", + "lz4" + ) +); + +MAKEENUM( + MimeType, + EVALUES( + Binary, + JSON, + BSON, + XML, + IMPORTED_XML, + Text + ), + ENAMES( + "application/x-sc-bin", + "text/json", + "application/x-sc-bson", + "application/x-sc-xml", + "text/xml", + "text/plain" + ) +); + + +DEFINE_SMARTPOINTER(Message); +/** + * @brief The Message class implements the message structure. + * + * A message contains meta data and a payload. Since each protocol has to + * encode the message differently a cached version for each protocol is also + * stored. That buffer can be sent without further modifications. This is + * in particular helpful if a message is going to be sent to hundreds of + * clients connected through the same protocol. The message has to be encoded + * only once and not hundred times. This cache is lazy and will only be + * populated at the first send operation. + */ +class SC_BROKER_API Message : public Seiscomp::Core::BaseObject { + // ---------------------------------------------------------------------- + // X'truction + // ---------------------------------------------------------------------- + public: + //! C'tor + Message(); + + + // ---------------------------------------------------------------------- + // Public interface + // ---------------------------------------------------------------------- + public: + /** + * @brief Decodes a message if object is NULL according to the payload + * and format + * @return true if msg->object is a valid pointer or false otherwise. + */ + bool decode(); + + /** + * @brief Encodes a message if object is not NULL and saves the + * encoded buffer in payload. + * @return true if payload is not empty, false otherwise. 
+ */ + bool encode(); + + + // ---------------------------------------------------------------------- + // Members + // ---------------------------------------------------------------------- + public: + enum struct Type { + Unspecified, + Regular, + Transient, // From this enumeration messages are not processed + Status + }; + + std::string sender; //!< The sender + std::string target; //!< The target group/topic + std::string encoding; //!< The encoding of the data + std::string mimeType; //!< The mime type of the data + std::string payload; //!< The payload bytes + Core::BaseObjectPtr object; //!< The decoded object + Core::Version schemaVersion; //!< The schema version of the payload after decoding + Seiscomp::Core::Time timestamp; //!< The received time + Type type; //!< The message type + bool selfDiscard; //!< Whether self discard should be checked or not + bool processed; + /** The assigned sequence number */ + SequenceNumber sequenceNumber; + + /** Cached encoded version for different protocols */ + Wired::BufferPtr encodingWebSocket; + + /** Cache of the target group */ + Group *_internalGroupPtr; +}; + + +} +} +} + + +#endif diff --git a/include/seiscomp/broker/messagedispatcher.h b/include/seiscomp/broker/messagedispatcher.h new file mode 100644 index 0000000..12b3512 --- /dev/null +++ b/include/seiscomp/broker/messagedispatcher.h @@ -0,0 +1,84 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * Author: Jan Becker * + * Email: jabe@gempa.de * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef GEMPA_BROKER_MESSAGEDISPATCHER_H__ +#define GEMPA_BROKER_MESSAGEDISPATCHER_H__ + + +#include + + +namespace Seiscomp { +namespace Messaging { +namespace Broker { + + +class Queue; + + +/** + * @brief The MessageDispatcher class is used to forward processed messages + * from another thread. + * + * Since it is not safe to call publish on all registered subscribers, the + * dispatcher class is provide safe handling within a given framework. + */ +class SC_BROKER_API MessageDispatcher { + // ---------------------------------------------------------------------- + // X'truction + // ---------------------------------------------------------------------- + public: + //! C'tor + MessageDispatcher() {} + + + // ---------------------------------------------------------------------- + // Dispatcher interface + // ---------------------------------------------------------------------- + public: + /** + * @brief Notifies the dispatcher about a new message. If the message + * should be published, dispatch() must be called. 
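+ *
+ * A minimal sketch of an implementation; postToMainThread() stands in
+ * for whatever event-loop hand-off the embedding application provides
+ * and is purely hypothetical:
+ *
+ *   class LoopDispatcher : public MessageDispatcher {
+ *     public:
+ *       void messageAvailable(Queue *queue) override {
+ *           // Called from the queue's processing thread: only schedule
+ *           // the hand-off here and publish later from the main thread.
+ *           postToMainThread([this, queue]() { flushMessages(queue); });
+ *       }
+ *   };
+ *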
+ * @param queue The queue that got a new message to be dispatched + */ + virtual void messageAvailable(Queue *queue) = 0; + + /** + * @brief Dispatches a message from the process-ready-queue. + * + * This call may block if not issued after the messageAvailable() + * signal. + * @param queue The target queue + */ + void flushMessages(Queue *queue) { + queue->flushProcessedMessages(); + } +}; + + +} +} +} + + +#endif diff --git a/include/seiscomp/broker/messageprocessor.h b/include/seiscomp/broker/messageprocessor.h new file mode 100644 index 0000000..79a6f6a --- /dev/null +++ b/include/seiscomp/broker/messageprocessor.h @@ -0,0 +1,145 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * Author: Jan Becker * + * Email: jabe@gempa.de * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef GEMPA_BROKER_MESSAGEPROCESSOR_H__ +#define GEMPA_BROKER_MESSAGEPROCESSOR_H__ + + +#include +#include + +#include +#include +#include + +#include + + +namespace Seiscomp { +namespace Messaging { +namespace Broker { + + +class Client; +class Message; + + +DEFINE_SMARTPOINTER(MessageProcessor); + +/** + * @brief The MessageProcessor class is used inside the broker to process + * messages in any way. The most important use case for such a + * processor is to store the message in the database if it suffices a + * certain format. Once could think of other use cases such as + * building statistics. 
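+ *
+ * A minimal sketch of a concrete processor; the class name and the
+ * "statistics" service string are made up for illustration:
+ *
+ *   class StatisticsProcessor : public MessageProcessor {
+ *     public:
+ *       StatisticsProcessor() { setMode(Messages); }
+ *       bool init(const Config::Config &, const std::string &) override {
+ *           return true;
+ *       }
+ *       bool acceptConnection(Client *, const KeyCStrValues, int,
+ *                             KeyValues &) override { return true; }
+ *       void dropConnection(Client *) override {}
+ *       bool process(Message *) override { ++_count; return true; }
+ *       bool close() override { return true; }
+ *       void getInfo(const Core::Time &, std::ostream &os) override {
+ *           os << "processed=" << _count;
+ *       }
+ *     private:
+ *       size_t _count{0};
+ *   };
+ *
+ *   REGISTER_BROKER_MESSAGE_PROCESSOR(StatisticsProcessor, "statistics");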
+ */ +class SC_BROKER_API MessageProcessor : public Processor { + // ---------------------------------------------------------------------- + // Public types + // ---------------------------------------------------------------------- + public: + enum Constants { + MaxAdditionalParams = 100 + }; + + enum Mode { + None = 0x00, + Messages = 0x01, + Connections = 0x02 + }; + + using KeyValueCStrPair = std::pair; + using KeyCStrValues = KeyValueCStrPair *; + + using KeyValuePair = std::pair; + using KeyValues = std::vector; + + + // ---------------------------------------------------------------------- + // X'truction + // ---------------------------------------------------------------------- + public: + MessageProcessor(); + virtual ~MessageProcessor(); + + + // ---------------------------------------------------------------------- + // Public virtual interface + // ---------------------------------------------------------------------- + public: + virtual bool acceptConnection(Client *client, + const KeyCStrValues inParams, int inParamCount, + KeyValues &outParams) = 0; + + virtual void dropConnection(Client *client) = 0; + + virtual bool process(Message *msg) = 0; + + + // ---------------------------------------------------------------------- + // Public interface + // ---------------------------------------------------------------------- + public: + int mode() const { return _mode; } + + /** + * @brief Returns whether the processor want to process messages. + * @return Flag + */ + bool isMessageProcessingEnabled() const { return _mode & Messages; } + + /** + * @brief Returns whether the processor want to process connections.. + * @return Flag + */ + bool isConnectionProcessingEnabled() const { return _mode & Connections; } + + + // ---------------------------------------------------------------------- + // Protected methods + // ---------------------------------------------------------------------- + protected: + void setMode(int mode); + + + // ---------------------------------------------------------------------- + // Private members + // ---------------------------------------------------------------------- + private: + int _mode; +}; + + +DEFINE_INTERFACE_FACTORY(MessageProcessor); + + +} +} +} + + +#define REGISTER_BROKER_MESSAGE_PROCESSOR(Class, Service) \ +Seiscomp::Core::Generic::InterfaceFactory __##Class##InterfaceFactory__(Service) + + +#endif diff --git a/include/seiscomp/broker/processor.h b/include/seiscomp/broker/processor.h new file mode 100644 index 0000000..fc9f022 --- /dev/null +++ b/include/seiscomp/broker/processor.h @@ -0,0 +1,96 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * Author: Jan Becker * + * Email: jabe@gempa.de * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. 
* + ***************************************************************************/ + + +#ifndef GEMPA_BROKER_PROCESSOR_H__ +#define GEMPA_BROKER_PROCESSOR_H__ + + +#include +#include + +#include + + +namespace Seiscomp { +namespace Messaging { +namespace Broker { + + +class Queue; + + +DEFINE_SMARTPOINTER(Processor); + +class SC_BROKER_API Processor : public Core::BaseObject { + public: + Processor(); + virtual ~Processor(); + + + public: + /** + * @brief Initiales the configuration of a processor from a config object + * and a given parameter name prefix. + * @param conf The configuration file object + * @param configPrefix The prefix that must be preprended to all + * parameters. + * @return Success flag. + */ + virtual bool init(const Config::Config &conf, const std::string &configPrefix) = 0; + + /** + * @brief When a processor has been added to a queue, this method will be + * called. The default implementation does nothing. This method + * can be used to e.g. allocate additional client memory of + * the local client heap. + * @param queue The queue the processor was attached to. + */ + virtual void attach(Queue *queue); + + /** + * @brief Shuts down the processor. + * @return Success flag. + */ + virtual bool close() = 0; + + /** + * @brief Add information to a state of health message + * @param timestamp The timestamp of the information + * @param os The output stream to write to + */ + virtual void getInfo(const Core::Time ×tamp, std::ostream &os) = 0; + + + private: + Queue *_queue; + + friend class Queue; +}; + + +} +} +} + + +#endif diff --git a/include/seiscomp/broker/protocol.h b/include/seiscomp/broker/protocol.h new file mode 100644 index 0000000..8f3563c --- /dev/null +++ b/include/seiscomp/broker/protocol.h @@ -0,0 +1,343 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * Author: Jan Becker * + * Email: jabe@gempa.de * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef GEMPA_BROKER_PROTOCOL_H__ +#define GEMPA_BROKER_PROTOCOL_H__ + + +namespace Seiscomp { +namespace Messaging { +namespace Broker { +namespace Protocol { + + +// Defines do not make much sense inside namespaces but they are placed here +// to stay close to future variable declarations. + +/** + * It follows a list of definitions for all protocol commands and replies and + * their headers. They are being used in the code and changing them here will + * cause a change in behaviour of the server. 
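+ *
+ * For illustration only, a client-side SEND frame could be assembled
+ * from these definitions roughly as follows (a sketch; "MY_GROUP" and
+ * the payload are placeholders and the real framing code lives
+ * elsewhere):
+ *
+ *   std::string payload = "...";
+ *   std::ostringstream frame;
+ *   frame << SCMP_PROTO_CMD_SEND << '\n'
+ *         << SCMP_PROTO_CMD_SEND_HEADER_DESTINATION << ": MY_GROUP\n"
+ *         << SCMP_PROTO_CMD_SEND_HEADER_MIMETYPE << ": application/x-sc-xml\n"
+ *         << SCMP_PROTO_CMD_SEND_HEADER_CONTENT_LENGTH << ": " << payload.size() << '\n'
+ *         << '\n' << payload << '\0';   // frames are terminated by a NUL byte (^@)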
+ */ + +/** + * ``` + * CONNECT + * Ack-Window: [number of messages after which an ack will be send from the server] + * Membership-Info: 1 + * Queue: [name of queue] + * Client-Name: [name of client] + * Subscriptions: [list of groups] + * Seq-No: [last seen sequence number] + * + * ^@ + * ``` + * + * The *Seq-No* header contains the last sequence number the client has seen from + * that queue. That header is optional. If subscriptions are given then the + * client will receive an **ENTER** frame for each group it subscribed to. If any + * of the requested groups does not exist, an **ERROR** frame is sent and the + * connection is closed. + * + * The order of messages looks as follows: + * + * 1. CONNECT + * 2. CONNECTED + * 3. ENTER + * 4. RECV + * + * Step 3 repeats for as many groups as given in the subscription list. Step 4 + * repeats for all messages received during the lifetime of the connection. + */ +#define SCMP_PROTO_CMD_CONNECT "CONNECT" +#define SCMP_PROTO_CMD_CONNECT_HEADER_QUEUE "Queue" +#define SCMP_PROTO_CMD_CONNECT_HEADER_CLIENT_NAME "Client-Name" +#define SCMP_PROTO_CMD_CONNECT_HEADER_MEMBERSHIP_INFO "Membership-Info" +#define SCMP_PROTO_CMD_CONNECT_HEADER_SELF_DISCARD "Self-Discard" +#define SCMP_PROTO_CMD_CONNECT_HEADER_ACK_WINDOW "Ack-Window" +#define SCMP_PROTO_CMD_CONNECT_HEADER_SEQ_NUMBER "Seq-No" +#define SCMP_PROTO_CMD_CONNECT_HEADER_SUBSCRIPTIONS "Subscriptions" + +/** + * ``` + * DISCONNECT + * Receipt: [id] + * + * ^@ + * ``` + * + * The DISCONNECT command ask the server to gracefully shutdown the connection + * and free all associated resources. + */ +#define SCMP_PROTO_CMD_DISCONNECT "DISCONNECT" +#define SCMP_PROTO_CMD_DISCONNECT_HEADER_RECEIPT "Receipt" + +/** + * ``` + * SUBSCRIBE + * Groups: [list of groups] + * + * ^@ + * ``` + * Subscribes to a specific group which must exist on the server. In response + * either an **ENTER** or **ERROR** frame will be received. + */ +#define SCMP_PROTO_CMD_SUBSCRIBE "SUBSCRIBE" +#define SCMP_PROTO_CMD_SUBSCRIBE_HEADER_GROUPS "Groups" + +/** + * ``` + * UNSUBSCRIBE + * Groups: [list of groups] + * + * ^@ + * ``` + * + * Unsubscribes from a specific group which must exist on the server. In + * response either a **LEAVE** or **ERROR** frame will be received. + */ +#define SCMP_PROTO_CMD_UNSUBSCRIBE "UNSUBSCRIBE" +#define SCMP_PROTO_CMD_UNSUBSCRIBE_HEADER_GROUPS SCMP_PROTO_CMD_SUBSCRIBE_HEADER_GROUPS + +/** + * Sends a message to a group or a client (peer-to-peer). + * + * ``` + * SEND + * D: [name of group or the client] + * T: [MIME type] + * E: [transfer encoding] + * L: [length of content] + * + * [payload]^@ + * ``` + * + * Each message sent will increase the private sequence number counter for this + * connection starting with 0. So the first message will get assigned the + * sequence number 1. That counter must be maintained by the client and the + * server to correctly synchronize acknowledgements. If the message is rejected + * an **ERROR** frame will be sent to the client and the connection will be + * closed. + */ +#define SCMP_PROTO_CMD_SEND "SEND" +#define SCMP_PROTO_CMD_SEND_HEADER_DESTINATION "D" +#define SCMP_PROTO_CMD_SEND_HEADER_CONTENT_LENGTH "L" +#define SCMP_PROTO_CMD_SEND_HEADER_ENCODING "E" +#define SCMP_PROTO_CMD_SEND_HEADER_MIMETYPE "T" +#define SCMP_PROTO_CMD_SEND_HEADER_TRANSIENT "Transient" + + +/** + * A member notifies the server about its state including memory consumption, + * cpu usage, uptime and so on. The payload is always a key-value list + * separated by '&'. 
+ * + * ``` + * STATE + * D: [name of group or the client] + * L: [length of content] + * + * hostname=localhost&totalmemory=8589934592&clientmemoryusage=68891443...^@ + * ``` + */ +#define SCMP_PROTO_CMD_STATE "STATE" +#define SCMP_PROTO_CMD_STATE_HEADER_DESTINATION "D" +#define SCMP_PROTO_CMD_STATE_HEADER_CONTENT_LENGTH "L" + +#define SCMP_PROTO_CMD_FIRST_CHARS "CDSU" + +/** + * ``` + * CONNECTED + * Queue: [name of queue] + * Server: SeisComP/2017.334 + * Version: [server protocol version] + * Client-Name: [client name, either auto assigned or requested by the client] + * Authentication: [16 byte hex random NaCL nonce prefix] + * [32 byte hex NaCL public server key] + * [16 byte hex encrypted buffer: "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00"] + * Groups: [list of available groups] + * + * ^@ + * ``` + * + * In return to a **CONNECT** frame the server responds with a **CONNECTED** + * frame. It reports the client name in use for this connection and a list of + * available groups. + */ +#define SCMP_PROTO_REPLY_CONNECT "CONNECTED" +#define SCMP_PROTO_REPLY_CONNECT_HEADER_VERSION "Version" +#define SCMP_PROTO_REPLY_CONNECT_HEADER_SCHEMA_VERSION "Schema-Version" +#define SCMP_PROTO_REPLY_CONNECT_HEADER_QUEUE SCMP_PROTO_CMD_CONNECT_HEADER_QUEUE +#define SCMP_PROTO_REPLY_CONNECT_HEADER_CLIENT_NAME SCMP_PROTO_CMD_CONNECT_HEADER_CLIENT_NAME +#define SCMP_PROTO_REPLY_CONNECT_HEADER_ACK_WINDOW SCMP_PROTO_CMD_CONNECT_HEADER_ACK_WINDOW +#define SCMP_PROTO_REPLY_CONNECT_HEADER_GROUPS "Groups" + +/** + * The probably most important part of the protocol is receiving a message from + * a group or a client (peer-to-peer). + * + * ``` + * RECV + * C: [client name of sender] + * D: [name of group or the client] + * T: [MIME type] + * E: [transfer encoding] + * N: [message sequence number] + * L: [length of content] + * + * [payload]^@ + * ``` + * + * The payload can be anything, binary data or text. Optionally the *Content-Type* + * header is set to inform the client about the format. + */ +#define SCMP_PROTO_REPLY_SEND "RECV" +#define SCMP_PROTO_REPLY_SEND_HEADER_SENDER "C" +#define SCMP_PROTO_REPLY_SEND_HEADER_SEQ_NUMBER "N" +#define SCMP_PROTO_REPLY_SEND_HEADER_DESTINATION SCMP_PROTO_CMD_SEND_HEADER_DESTINATION +#define SCMP_PROTO_REPLY_SEND_HEADER_CONTENT_LENGTH SCMP_PROTO_CMD_SEND_HEADER_CONTENT_LENGTH +#define SCMP_PROTO_REPLY_SEND_HEADER_ENCODING SCMP_PROTO_CMD_SEND_HEADER_ENCODING +#define SCMP_PROTO_REPLY_SEND_HEADER_MIMETYPE SCMP_PROTO_CMD_SEND_HEADER_MIMETYPE + +/** + * ``` + * ACK + * N: [connection specific sequence number] + * + * ^@ + * ``` + * + * The server sends according to the configured acknoledgement window an + * acknowledgement frame to signal that all messages prior to the given sequence + * number have been processed and that the current sequence number is expected + * to be the one sent. It will do that as well after 1 second the client + * hasn't sent any further messages. + */ +#define SCMP_PROTO_REPLY_ACK "ACK" +#define SCMP_PROTO_REPLY_ACK_HEADER_SEQ_NUMBER SCMP_PROTO_REPLY_SEND_HEADER_SEQ_NUMBER + +/** + * ``` + * RECEIPT + * Receipt-Id: [id] + * + * ^@ + * ``` + * + * A receipt can basically be sent for anything which has an id. A receipt is + * being sent definitely after a disconnect request. In that case the receipt + * id is the username. + */ +#define SCMP_PROTO_REPLY_RECEIPT "RECEIPT" +#define SCMP_PROTO_REPLY_RECEIPT_HEADER_ID "Receipt-Id" + +/** + * A members enters a group. 
In response to a **SUBSCRIBE** command, the complete + * group information will be sent to the client. Specifically if + * ```Member == self```. Otherwise only the group and member information will be + * sent from the server to all clients that are subscribed to that group. The + * client needs to update its internal cache and the frame body is empty in that + * case. + * + * ``` + * ENTER + * D: [name of group] + * C: [name of client] + * + * clientA, clientB, ... + * }^@ + * ``` + */ +#define SCMP_PROTO_REPLY_ENTER "ENTER" +#define SCMP_PROTO_REPLY_ENTER_HEADER_GROUP "D" +#define SCMP_PROTO_REPLY_ENTER_HEADER_MEMBER "C" + +/** + * A member leaves a group. This message will sent from the server to all clients + * that are subscribed to the group in question. + * + * ``` + * LEAVE + * D: [name of group] + * C: [name of client] + * + * ^@ + * ``` + */ +#define SCMP_PROTO_REPLY_LEAVE "LEAVE" +#define SCMP_PROTO_REPLY_LEAVE_HEADER_GROUP SCMP_PROTO_REPLY_ENTER_HEADER_GROUP +#define SCMP_PROTO_REPLY_LEAVE_HEADER_MEMBER SCMP_PROTO_REPLY_ENTER_HEADER_MEMBER + +/** + * A member state of health information or simply its state including + * memory consumption, cpu usage, uptime and so on. The payload is always + * a key-value list separated by '&'. + * + * ``` + * STATE + * L: [length of content] + * D: [name of group or the client] + * C: [name of client] + * + * hostname=localhost&totalmemory=8589934592&clientmemoryusage=68891443...^@ + * ``` + */ +#define SCMP_PROTO_REPLY_STATE "STATE" +#define SCMP_PROTO_REPLY_STATE_HEADER_DESTINATION "D" +#define SCMP_PROTO_REPLY_STATE_HEADER_CLIENT "C" +#define SCMP_PROTO_REPLY_STATE_HEADER_CONTENT_LENGTH SCMP_PROTO_CMD_SEND_HEADER_CONTENT_LENGTH + +/** + * A client was disconnected. This message will sent from the server to all + * clients currently connected. + * + * ``` + * DISCONNECTED + * C: [name of client] + * + * ^@ + * ``` + */ +#define SCMP_PROTO_REPLY_DISCONNECTED "DISCONNECTED" +#define SCMP_PROTO_REPLY_DISCONNECTED_HEADER_CLIENT SCMP_PROTO_REPLY_STATE_HEADER_CLIENT + +/** + * ``` + * ERROR + * N: [connection specific sequence number] + * + * Error message ...^@ + * ``` + */ +#define SCMP_PROTO_REPLY_ERROR "ERROR" +#define SCMP_PROTO_REPLY_ERROR_HEADER_SEQ_NUMBER SCMP_PROTO_REPLY_SEND_HEADER_SEQ_NUMBER + + +} +} +} +} + + +#endif diff --git a/include/seiscomp/broker/queue.h b/include/seiscomp/broker/queue.h new file mode 100644 index 0000000..2af883a --- /dev/null +++ b/include/seiscomp/broker/queue.h @@ -0,0 +1,410 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * Author: Jan Becker * + * Email: jabe@gempa.de * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. 
* + ***************************************************************************/ + + +#ifndef SEISCOMP_BROKER_QUEUE_H__ +#define SEISCOMP_BROKER_QUEUE_H__ + + +#include +#include + +#include +#include +#include +#include +#include + +#include +#include + +#include +#include + + +namespace Seiscomp { +namespace Messaging { +namespace Broker { + + +class Client; +class MessageDispatcher; + +DEFINE_SMARTPOINTER(MessageProcessor); + + +/** + * @brief The Queue class implements the central messaging service. + * + * The Queue receives messages, queues them and distributes them to subscribed + * clients. + */ +class SC_BROKER_API Queue { + // ---------------------------------------------------------------------- + // Public types + // ---------------------------------------------------------------------- + public: + using StringList = std::vector; + using MessageProcessors = std::vector; + + using KeyValueCStrPair = MessageProcessor::KeyValueCStrPair; + using KeyCStrValues = MessageProcessor::KeyCStrValues; + + using KeyValuePair = MessageProcessor::KeyValuePair; + using KeyValues = MessageProcessor::KeyValues; + + enum Constants { + MaxAdditionalParams = MessageProcessor::MaxAdditionalParams + }; + + MAKEENUM( + Result, + EVALUES( + Success, + InternalError, + ClientNameNotUnique, + ClientNotAccepted, + GroupNameNotUnique, + GroupDoesNotExist, + GroupAlreadySubscribed, + GroupNotSubscribed, + MessageNotAccepted, + MessageDecodingFailed, + MessageEncodingFailed, + NotEnoughClientHeap + ), + ENAMES( + "Success", + "Internal error", + "Client name is not unique", + "Client was not accepted", + "Group name is not unique", + "Group does not exist", + "Already subscribed to group", + "Not subscribed to group", + "Message not accepted", + "Message could not be decoded", + "Message could not be encoded", + "Not enough client heap" + ) + ); + + const std::string StatusGroup = "STATUS_GROUP"; + + + // ---------------------------------------------------------------------- + // X'truction + // ---------------------------------------------------------------------- + public: + //! C'tor + Queue(const std::string &name, uint64_t maxPayloadSize); + ~Queue(); + + + // ---------------------------------------------------------------------- + // Public interface + // ---------------------------------------------------------------------- + public: + /** + * @return The queue name + */ + const std::string &name() const; + + /** + * @brief Adds a message processor to the list of processors. + * @param proc The processor instance which is managed by the queue + * with a smart pointer. + * @return Success flag + */ + bool add(MessageProcessor *proc); + + /** + * @brief Adds a group/topic to the queue. + * @param name The name of the group + * @return true on success, false otherwise + */ + Result addGroup(const std::string &name); + + /** + * @brief Returns a list of available group names + * @return The list of names + */ + const StringList &groups() const { return _groupNames; } + + /** + * @brief Return the sender name of the queue. + * @return A NULL terminated const string + */ + const char *senderName() const; + + /** + * @brief Sets the message dispatcher for thread synchronisation. + * + * The queue runs a thread to process messages via plugins. If the + * message is processed the thread notifies the queue about it. The + * queue could now call publish but that is probably not thread-safe + * and inefficient to implement on each subscriber. 
The message + * dispatcher receives a notification about a new message and can then + * implement any inter-thread communication to publish the message in + * the same context as it has been created. + * + * @param dispatcher The dispatcher instance not managed by the queue. + */ + void setMessageDispatcher(MessageDispatcher *dispatcher); + + /** + * @brief Subscribe a client to a particular group + * @param client The client + * @param group The name of the group + * @return The result code + */ + Result subscribe(Client *client, const std::string &group); + + /** + * @brief Unsubscribes a client from a particular group + * @param client The client + * @param group The name of the group + * @return The result code + */ + Result unsubscribe(Client *client, const std::string &group); + + /** + * @brief Returns a buffered message after a particular sequence number + * @param sequenceNumber The sequence number to continue with. + * + * The returned message must have a sequence number greater than + * this parameter or lower if a wrap has occured but never the + * same. + * @param client The client instance to filter subscriptions for + * @return A message pointer or NULL if no message is available + */ + Message *getMessage(SequenceNumber sequenceNumber, + const Client *client) const; + + /** + * @brief Pushes a message from a client to the queue + * + * This method is called from Client subclasses that received a message + * through their transport protocol. The message pointer will either + * be managed in a smart pointer or deleted. If false is returned the + * caller must take care of deleting the message. + * + * @param sender The sender instance + * @param msg The message + * @param packetLength The size in bytes of the received packet including + * protocol specific header data. This is only + * used for statistics. + * @return The result code + */ + Result push(Client *sender, Message *msg, int packetSize = 0); + + /** + * @brief Activates the queue and starts the processing thread. + */ + void activate(); + + /** + * @brief Shutdown the queue and finished the processing thread if + * running. + * + * This will also shutdown all processors associated with the queue. + * + * Note that this call can block depending how many plugins are + * running and currently processing a message. This method waits until + * the processing thread is finished. + */ + void shutdown(); + + /** + * @brief Callback to notify the queue about some timeout. + * + * This function is used to check expiration of outstanding + * acknowledgement messages. This function is not thread-safe and + * must be called from within the thread the queue is running in. + */ + void timeout(); + + /** + * @brief Populates the passed statistics structure. + * @param stats[out] The target structure + * @param reset[in] Whether to reset the internal statistics or not. + */ + void getStatisticsSnapshot(QueueStatistics &stats, bool reset = true); + + + // ---------------------------------------------------------------------- + // Client memory interface + // ---------------------------------------------------------------------- + public: + /** + * @brief Allocates additional client heap. Once allocated the heap + * cannot be free'd anymore. This is mainly used for plugins + * that are initialized once and need to store additional + * data in a client structure. + * @param bytes The number of bytes to allocate + * @return An offset to the local client heap or a negative number + * in case of an error. 
The absolute value (-result) of the + * error translates to a status code (@Result). + */ + int allocateClientHeap(int bytes); + + + // ---------------------------------------------------------------------- + // Publisher interface + // ---------------------------------------------------------------------- + public: + /** + * @brief Registers a client in the queue and sets up the PubSub + * connections. + * + * This is called when the client calls connect and is part of the + * PublisherBase interface. + * @param client The client to be registered + * @param slot The slot + * @return The result code + */ + Result connect(Client *client, const KeyCStrValues params, int paramCount, + KeyValues &outParams); + + /** + * @brief Deregisters a client from the queue and clears the PubSub + * connections. + * + * This is called when the client calls disconnect and is part of the + * PublisherBase interface. + * @param client The client to be deregistered + * @return The result code + */ + Result disconnect(Client *client); + + + // ---------------------------------------------------------------------- + // Settings interface + // ---------------------------------------------------------------------- + public: + uint64_t maxPayloadSize() const; + + + // ---------------------------------------------------------------------- + // Private interface + // ---------------------------------------------------------------------- + private: + using ProcessingTask = std::pair; + using TaskQueue = Utils::BlockingDequeue; + + /** + * @brief Publishes a message from a client to all registered clients + * + * This method is called from Client subclasses that received a message + * through their transport protocol. + * + * @param sender The sender instance + * @param msg The message + * @return true on success, false otherwise + */ + bool publish(Client *sender, Message *msg); + + /** + * @brief Pops all messages from the processing queue and publishes them. + * + * This call does not block. + */ + void flushProcessedMessages(); + + /** + * @brief The processing loop running in a different thread. + */ + void processingLoop(); + + /** + * @brief Processes a message e.g. via plugins. + * @param task The task to be processed + */ + void process(ProcessingTask &task); + + /** + * @brief Called from the processing thread informing the queue that + * the message is processed and can be forwarded to clients. 
+ * @param task The task + */ + void taskReady(const ProcessingTask &task); + + /** + * @brief Replaces the incoming message with a response + * @param task The task to be updated + */ + void returnToSender(Message *msg, Core::BaseObject *obj); + + + // ---------------------------------------------------------------------- + // Private members + // ---------------------------------------------------------------------- + private: + using Groups = std::map; + using MessageRing = circular_buffer; + using ClientNames = KHashSet; + using Clients = KHashMap; + + std::string _name; + MessageProcessors _processors; + MessageProcessors _connectionProcessors; + MessageProcessors _messageProcessors; + MessageDispatcher *_processedMessageDispatcher; + SequenceNumber _sequenceNumber; + Groups _groups; + StringList _groupNames; + MessageRing _messages; + Clients _clients; + std::thread *_messageProcessor; + TaskQueue _tasks; + TaskQueue _results; + Core::Time _created; + Core::Time _lastSOHTimestamp; + int _allocatedClientHeap; + int _sohInterval; + int _inactivityLimit; + uint64_t _maxPayloadSize; + mutable Tx _txMessages; + mutable Tx _txBytes; + mutable Tx _txPayload; + + + friend class MessageDispatcher; +}; + + +inline const std::string &Queue::name() const { + return _name; +} + + +inline uint64_t Queue::maxPayloadSize() const { + return _maxPayloadSize; +} + + +} +} +} + + +#endif diff --git a/include/seiscomp/broker/statistics.h b/include/seiscomp/broker/statistics.h new file mode 100644 index 0000000..a89e6d5 --- /dev/null +++ b/include/seiscomp/broker/statistics.h @@ -0,0 +1,109 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * Author: Jan Becker * + * Email: jabe@gempa.de * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef SEISCOMP_BROKER_STATISTICS_H__ +#define SEISCOMP_BROKER_STATISTICS_H__ + + +#include +#include + + +namespace Seiscomp { +namespace Messaging { +namespace Broker { + + +/** + * @brief Simple structure to store transfer counts. + * The unit of the counters is not defined. It can be counts or + * bytes or something different. That depends on the context. 
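+ *
+ * Counters are meant to be accumulated, e.g. (sketch; perGroup is a
+ * hypothetical Tx instance):
+ *
+ *   Tx total;
+ *   total += perGroup;   // adds both .received and .sent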
+ */ +struct SC_BROKER_API Tx : Core::BaseObject { + Tx() : received(0), sent(0) {} + + double received; //!< Number of items received + double sent; //!< Number of items sent + + Tx &operator+=(const Tx &other) { + received += other.received; + sent += other.sent; + return *this; + } + + DECLARE_SERIALIZATION { + ar + & NAMED_OBJECT("recv", received) + & NAMED_OBJECT("sent", sent) + ; + } +}; + + +struct GroupStatistics : Core::BaseObject { + std::string name; + Tx messages; + Tx bytes; + Tx payload; + + DECLARE_SERIALIZATION { + ar + & NAMED_OBJECT("name", name) + & NAMED_OBJECT_HINT("messages", messages, Archive::STATIC_TYPE) + & NAMED_OBJECT_HINT("bytes", bytes, Archive::STATIC_TYPE) + & NAMED_OBJECT_HINT("payload", payload, Archive::STATIC_TYPE) + ; + } +}; + + +DEFINE_SMARTPOINTER(QueueStatistics); +struct SC_BROKER_API QueueStatistics : Core::BaseObject { + typedef std::vector Groups; + std::string name; + Groups groups; + Tx messages; + Tx bytes; + Tx payload; + + QueueStatistics &operator+=(const QueueStatistics &stats); + + DECLARE_SERIALIZATION { + ar + & NAMED_OBJECT("name", name) + & NAMED_OBJECT_HINT("messages", messages, Archive::STATIC_TYPE) + & NAMED_OBJECT_HINT("bytes", bytes, Archive::STATIC_TYPE) + & NAMED_OBJECT_HINT("payload", payload, Archive::STATIC_TYPE) + & NAMED_OBJECT_HINT("groups", groups, Archive::STATIC_TYPE) + ; + } +}; + + + +} +} +} + + +#endif diff --git a/include/seiscomp/broker/utils/circular.h b/include/seiscomp/broker/utils/circular.h new file mode 100644 index 0000000..64e8cef --- /dev/null +++ b/include/seiscomp/broker/utils/circular.h @@ -0,0 +1,496 @@ +/****************************************************************************** + * Author: Pete Goodliffe + * + * ---------------------------------------------------------------------------- + * Copyright 2002 Pete Goodliffe All rights reserved. + * + * ---------------------------------------------------------------------------- + * Purpose: STL-style circular buffer + * + * Formatting changed by jabe@gempa.de + *****************************************************************************/ + + +#ifndef CIRCULAR_BUFFER_H +#define CIRCULAR_BUFFER_H + + +#include +#include +#include +#include + + +/****************************************************************************** + * Iterators + *****************************************************************************/ + +/** + * Iterator type for the circular_buffer class. + * + * This one template class provides all variants of forward/reverse + * const/non const iterators through plentiful template magic. + * + * You don't need to instantiate it directly, use the good public functions + * availble in circular_buffer. 
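+ *
+ * Typical use goes through circular_buffer itself, e.g. (a minimal
+ * sketch; handle() is a hypothetical consumer):
+ *
+ *   circular_buffer<int, true> buf(4);   // keep at most 4 elements
+ *   for ( int i = 0; i < 6; ++i )
+ *       buf.push_back(i);                // 0 and 1 get overwritten
+ *   // buf.front() == 2, buf.back() == 5
+ *   for ( circular_buffer<int, true>::iterator it = buf.begin(); it != buf.end(); ++it ) {
+ *       handle(*it);                     // visits 2, 3, 4, 5 in order
+ *   }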
+ */ +template //+ const for const iter +class circular_buffer_iterator { + public: + typedef circular_buffer_iterator self_type; + typedef T cbuf_type; + typedef std::random_access_iterator_tag iterator_category; + typedef typename cbuf_type::value_type value_type; + typedef typename cbuf_type::size_type size_type; + typedef typename cbuf_type::pointer pointer; + typedef typename cbuf_type::const_pointer const_pointer; + typedef typename cbuf_type::reference reference; + typedef typename cbuf_type::const_reference const_reference; + typedef typename cbuf_type::difference_type difference_type; + + circular_buffer_iterator(cbuf_type *b, size_type p) + : buf_(b), pos_(p) {} + + // Converting a non-const iterator to a const iterator + circular_buffer_iterator(const circular_buffer_iterator &other) + : buf_(other.buf_), pos_(other.pos_) {} + + friend class circular_buffer_iterator ; + + // Use compiler generated copy ctor, copy assignment operator and dtor + + elem_type &operator*() { + return (*buf_)[pos_]; + } + + elem_type *operator->() { + return &(operator*()); + } + + self_type &operator++() { + pos_ += 1; + return *this; + } + + self_type operator++(int) { + self_type tmp(*this); + ++(*this); + return tmp; + } + + self_type &operator--() { + pos_ -= 1; + return *this; + } + + self_type operator--(int) { + self_type tmp(*this); + --(*this); + return tmp; + } + + self_type operator+(difference_type n) const { + self_type tmp(*this); + tmp.pos_ += n; + return tmp; + } + + self_type &operator+=(difference_type n) { + pos_ += n; + return *this; + } + + self_type operator-(difference_type n) const { + self_type tmp(*this); + tmp.pos_ -= n; + return tmp; + } + + self_type &operator-=(difference_type n) { + pos_ -= n; + return *this; + } + + difference_type operator-(const self_type &c) const { + return pos_ - c.pos_; + } + + bool operator==(const self_type &other) const { + return pos_ == other.pos_ && buf_ == other.buf_; + } + bool operator!=(const self_type &other) const { + return pos_ != other.pos_ && buf_ == other.buf_; + } + bool operator>(const self_type &other) const { + return pos_ > other.pos_; + } + bool operator>=(const self_type &other) const { + return pos_ >= other.pos_; + } + bool operator<(const self_type &other) const { + return pos_ < other.pos_; + } + bool operator<=(const self_type &other) const { + return pos_ <= other.pos_; + } + + private: + cbuf_type *buf_; + size_type pos_; +}; + + +template +circular_buffer_iterator_t operator+( + const typename circular_buffer_iterator_t::difference_type &a, + const circular_buffer_iterator_t &b) { + return circular_buffer_iterator_t(a) + b; +} + +template +circular_buffer_iterator_t operator-( + const typename circular_buffer_iterator_t::difference_type &a, + const circular_buffer_iterator_t &b) { + return circular_buffer_iterator_t(a) - b; +} + + +/****************************************************************************** + * circular_buffer + *****************************************************************************/ + +/** + * This class provides a circular buffer in the STL style. + * + * You can add data to the end using the @ref push_back function, read data + * using @ref front() and remove data using @ref pop_front(). + * + * The class also provides random access through the @ref operator[]() + * function and its random access iterator. Subscripting the array with + * an invalid (out of range) index number leads to undefined results, both + * for reading and writing. 
+ * + * This class template accepts three template parameters: + *
  • T The type of object contained + *
  • always_accept_data_when_full Determines the behaviour of + * @ref push_back when the buffer is full. + * Set to true, new data is always added and the + * old "end" data is thrown away. + * Set to false, the new data is not added; + * no error is returned, nor is an + * exception raised. + *
  • Alloc Allocator type to use (in line with other + * STL containers). + * + * @short STL style circule buffer + * @author Pete Goodliffe + * @version 1.00 + */ +template > +class circular_buffer { + public: + enum { + version_major = 1, version_minor = 0 + }; + + // Typedefs + typedef circular_buffer self_type; + + typedef Alloc allocator_type; + + typedef typename Alloc::value_type value_type; + typedef typename Alloc::pointer pointer; + typedef typename Alloc::const_pointer const_pointer; + typedef typename Alloc::reference reference; + typedef typename Alloc::const_reference const_reference; + + typedef typename Alloc::size_type size_type; + typedef typename Alloc::difference_type difference_type; + + typedef circular_buffer_iterator iterator; + typedef circular_buffer_iterator const_iterator; + typedef std::reverse_iterator reverse_iterator; + typedef std::reverse_iterator const_reverse_iterator; + + // Lifetime + enum { + default_capacity = 100 + }; + + explicit circular_buffer(size_type capacity = default_capacity) + : array_(alloc_.allocate(capacity)) + , array_size_(capacity) + , head_(1) + , tail_(0) + , contents_size_(0) {} + + circular_buffer(const circular_buffer &other) + : array_(alloc_.allocate(other.array_size_)) + , array_size_(other.array_size_), head_(other.head_) + , tail_(other.tail_), contents_size_(other.contents_size_) { + try { + assign_into(other.begin(), other.end()); + } + catch ( ... ) { + destroy_all_elements(); + alloc_.deallocate(array_, array_size_); + throw; + } + } + + template + circular_buffer(InputIterator from, InputIterator to) + : array_(alloc_.allocate(1)), array_size_(1) + , head_(1), tail_(0), contents_size_(0) { + circular_buffer tmp; + tmp.assign_into_reserving(from, to); + swap(tmp); + } + + ~circular_buffer() { + destroy_all_elements(); + alloc_.deallocate(array_, array_size_); + } + + circular_buffer &operator=(const self_type &other) { + circular_buffer tmp(other); + swap(tmp); + return *this; + } + + void swap(circular_buffer &other) { + std::swap(array_, other.array_); + std::swap(array_size_, other.array_size_); + std::swap(head_, other.head_); + std::swap(tail_, other.tail_); + std::swap(contents_size_, other.contents_size_); + } + + allocator_type get_allocator() const { + return alloc_; + } + + // Iterators + iterator begin() { + return iterator(this, 0); + } + + iterator end() { + return iterator(this, size()); + } + + const_iterator begin() const { + return const_iterator(this, 0); + } + + const_iterator end() const { + return const_iterator(this, size()); + } + + reverse_iterator rbegin() { + return reverse_iterator(end()); + } + + reverse_iterator rend() { + return reverse_iterator(begin()); + } + + const_reverse_iterator rbegin() const { + return const_reverse_iterator(end()); + } + + const_reverse_iterator rend() const { + return const_reverse_iterator(begin()); + } + + // Size + size_type size() const { + return contents_size_; + } + + size_type capacity() const { + return array_size_; + } + + bool empty() const { + return !contents_size_; + } + + size_type max_size() const { + return alloc_.max_size(); + } + + void reserve(size_type new_size) { + if ( capacity() < new_size ) { + circular_buffer tmp(new_size); + tmp.assign_into(begin(), end()); + swap(tmp); + } + } + + // Accessing + reference front() { + return array_[head_]; + } + + reference back() { + return array_[tail_]; + } + + const_reference front() const { + return array_[head_]; + } + + const_reference back() const { + return array_[tail_]; + } + + void 
push_back(const value_type &item) { + size_type next = next_tail(); + if ( contents_size_ == array_size_ ) { + if ( always_accept_data_when_full ) { + array_[next] = item; + increment_head(); + } + } + else { + alloc_.construct(array_ + next, item); + } + increment_tail(); + } + + void pop_front() { + size_type destroy_pos = head_; + increment_head(); + alloc_.destroy(array_ + destroy_pos); + } + + void clear() { + for ( size_type n = 0; n < contents_size_; ++n ) { + alloc_.destroy(array_ + index_to_subscript(n)); + } + head_ = 1; + tail_ = contents_size_ = 0; + } + + reference operator[](size_type n) { + return at_unchecked(n); + } + + const_reference operator[](size_type n) const { + return at_unchecked(n); + } + + reference at(size_type n) { + return at_checked(n); + } + + const_reference at(size_type n) const { + return at_checked(n); + } + + + private: + reference at_unchecked(size_type index) const { + return array_[index_to_subscript(index)]; + } + + reference at_checked(size_type index) const { + if ( index >= contents_size_ ) { + throw std::out_of_range("index out of bounds"); + } + return at_unchecked(index); + } + + // Rounds an unbounded to an index into array_ + size_type normalise(size_type n) const { + return n % array_size_; + } + + // Converts external index to an array subscript + size_type index_to_subscript(size_type index) const { + return normalise(index + head_); + } + + void increment_tail() { + ++contents_size_; + tail_ = next_tail(); + } + + size_type next_tail() { + return (tail_ + 1 == array_size_) ? 0 : tail_ + 1; + } + + void increment_head() { + // precondition: !empty() + ++head_; + --contents_size_; + if ( head_ == array_size_ ) + head_ = 0; + } + + template + void assign_into(f_iter from, f_iter to) { + if ( contents_size_ ) + clear(); + while ( from != to ) { + push_back(*from); + ++from; + } + } + + template + void assign_into_reserving(f_iter from, f_iter to) { + if ( contents_size_ ) + clear(); + + while ( from != to ) { + if ( contents_size_ == array_size_ ) { + reserve(static_cast(array_size_ * 1.5)); + } + + push_back(*from); + ++from; + } + } + + void destroy_all_elements() { + for ( size_type n = 0; n < contents_size_; ++n ) { + alloc_.destroy(array_ + index_to_subscript(n)); + } + } + + allocator_type alloc_; + value_type *array_; + size_type array_size_; + size_type head_; + size_type tail_; + size_type contents_size_; +}; + + +template +bool operator==(const circular_buffer &a, + const circular_buffer &b) { + return a.size() == b.size() && std::equal(a.begin(), a.end(), b.begin()); +} + +template +bool operator!=(const circular_buffer &a, + const circular_buffer &b) { + return a.size() != b.size() || !std::equal(a.begin(), a.end(), b.begin()); +} + +template +bool operator<(const circular_buffer &a, + const circular_buffer &b) { + return std::lexicographical_compare(a.begin(), a.end(), b.begin(), b.end()); +} + + +#endif diff --git a/include/seiscomp/broker/utils/khash.h b/include/seiscomp/broker/utils/khash.h new file mode 100644 index 0000000..06fc7a3 --- /dev/null +++ b/include/seiscomp/broker/utils/khash.h @@ -0,0 +1,627 @@ +/* The MIT License + + Copyright (c) 2008, 2009, 2011 by Attractive Chaos + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to 
+ permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS + BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN + ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +*/ + +/* + An example: + +#include "khash.h" +KHASH_MAP_INIT_INT(32, char) +int main() { + int ret, is_missing; + khiter_t k; + khash_t(32) *h = kh_init(32); + k = kh_put(32, h, 5, &ret); + kh_value(h, k) = 10; + k = kh_get(32, h, 10); + is_missing = (k == kh_end(h)); + k = kh_get(32, h, 5); + kh_del(32, h, k); + for (k = kh_begin(h); k != kh_end(h); ++k) + if (kh_exist(h, k)) kh_value(h, k) = 1; + kh_destroy(32, h); + return 0; +} +*/ + +/* + 2013-05-02 (0.2.8): + + * Use quadratic probing. When the capacity is power of 2, stepping function + i*(i+1)/2 guarantees to traverse each bucket. It is better than double + hashing on cache performance and is more robust than linear probing. + + In theory, double hashing should be more robust than quadratic probing. + However, my implementation is probably not for large hash tables, because + the second hash function is closely tied to the first hash function, + which reduce the effectiveness of double hashing. + + Reference: http://research.cs.vt.edu/AVresearch/hashing/quadratic.php + + 2011-12-29 (0.2.7): + + * Minor code clean up; no actual effect. + + 2011-09-16 (0.2.6): + + * The capacity is a power of 2. This seems to dramatically improve the + speed for simple keys. Thank Zilong Tan for the suggestion. Reference: + + - http://code.google.com/p/ulib/ + - http://nothings.org/computer/judy/ + + * Allow to optionally use linear probing which usually has better + performance for random input. Double hashing is still the default as it + is more robust to certain non-random input. + + * Added Wang's integer hash function (not used by default). This hash + function is more robust to certain non-random input. + + 2011-02-14 (0.2.5): + + * Allow to declare global functions. + + 2009-09-26 (0.2.4): + + * Improve portability + + 2008-09-19 (0.2.3): + + * Corrected the example + * Improved interfaces + + 2008-09-11 (0.2.2): + + * Improved speed a little in kh_put() + + 2008-09-10 (0.2.1): + + * Added kh_clear() + * Fixed a compiling error + + 2008-09-02 (0.2.0): + + * Changed to token concatenation which increases flexibility. + + 2008-08-31 (0.1.2): + + * Fixed a bug in kh_get(), which has not been tested previously. + + 2008-08-31 (0.1.1): + + * Added destructor +*/ + + +#ifndef __AC_KHASH_H +#define __AC_KHASH_H + +/*! + @header + + Generic hash table library. 
+ */ + +#define AC_VERSION_KHASH_H "0.2.8" + +#include +#include +#include + +/* compiler specific configuration */ + +#if UINT_MAX == 0xffffffffu +typedef unsigned int khint32_t; +#elif ULONG_MAX == 0xffffffffu +typedef unsigned long khint32_t; +#endif + +#if ULONG_MAX == ULLONG_MAX +typedef unsigned long khint64_t; +#else +typedef unsigned long long khint64_t; +#endif + +#ifndef kh_inline +#ifdef _MSC_VER +#define kh_inline __inline +#else +#define kh_inline inline +#endif +#endif /* kh_inline */ + +#ifndef klib_unused +#if (defined __clang__ && __clang_major__ >= 3) || (defined __GNUC__ && __GNUC__ >= 3) +#define klib_unused __attribute__ ((__unused__)) +#else +#define klib_unused +#endif +#endif /* klib_unused */ + +typedef khint32_t khint_t; +typedef khint_t khiter_t; + +#define __ac_isempty(flag, i) ((flag[i>>4]>>((i&0xfU)<<1))&2) +#define __ac_isdel(flag, i) ((flag[i>>4]>>((i&0xfU)<<1))&1) +#define __ac_iseither(flag, i) ((flag[i>>4]>>((i&0xfU)<<1))&3) +#define __ac_set_isdel_false(flag, i) (flag[i>>4]&=~(1ul<<((i&0xfU)<<1))) +#define __ac_set_isempty_false(flag, i) (flag[i>>4]&=~(2ul<<((i&0xfU)<<1))) +#define __ac_set_isboth_false(flag, i) (flag[i>>4]&=~(3ul<<((i&0xfU)<<1))) +#define __ac_set_isdel_true(flag, i) (flag[i>>4]|=1ul<<((i&0xfU)<<1)) + +#define __ac_fsize(m) ((m) < 16? 1 : (m)>>4) + +#ifndef kroundup32 +#define kroundup32(x) (--(x), (x)|=(x)>>1, (x)|=(x)>>2, (x)|=(x)>>4, (x)|=(x)>>8, (x)|=(x)>>16, ++(x)) +#endif + +#ifndef kcalloc +#define kcalloc(N,Z) calloc(N,Z) +#endif +#ifndef kmalloc +#define kmalloc(Z) malloc(Z) +#endif +#ifndef krealloc +#define krealloc(P,Z) realloc(P,Z) +#endif +#ifndef kfree +#define kfree(P) free(P) +#endif + +static const double __ac_HASH_UPPER = 0.77; + +#define __KHASH_TYPE(name, khkey_t, khval_t) \ + typedef struct kh_##name##_s { \ + khint_t n_buckets, size, n_occupied, upper_bound; \ + khint32_t *flags; \ + khkey_t *keys; \ + khval_t *vals; \ + } kh_##name##_t; + +#define __KHASH_PROTOTYPES(name, khkey_t, khval_t) \ + extern kh_##name##_t *kh_init_##name(void); \ + extern void kh_destroy_##name(kh_##name##_t *h); \ + extern void kh_clear_##name(kh_##name##_t *h); \ + extern khint_t kh_get_##name(const kh_##name##_t *h, khkey_t key); \ + extern int kh_resize_##name(kh_##name##_t *h, khint_t new_n_buckets); \ + extern khint_t kh_put_##name(kh_##name##_t *h, khkey_t key, int *ret); \ + extern void kh_del_##name(kh_##name##_t *h, khint_t x); + +#define __KHASH_IMPL(name, SCOPE, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) \ + SCOPE kh_##name##_t *kh_init_##name(void) { \ + return (kh_##name##_t*)kcalloc(1, sizeof(kh_##name##_t)); \ + } \ + SCOPE void kh_destroy_##name(kh_##name##_t *h) \ + { \ + if (h) { \ + kfree((void *)h->keys); kfree(h->flags); \ + kfree((void *)h->vals); \ + kfree(h); \ + } \ + } \ + SCOPE void kh_clear_##name(kh_##name##_t *h) \ + { \ + if (h && h->flags) { \ + memset(h->flags, 0xaa, __ac_fsize(h->n_buckets) * sizeof(khint32_t)); \ + h->size = h->n_occupied = 0; \ + } \ + } \ + SCOPE khint_t kh_get_##name(const kh_##name##_t *h, khkey_t key) \ + { \ + if (h->n_buckets) { \ + khint_t k, i, last, mask, step = 0; \ + mask = h->n_buckets - 1; \ + k = __hash_func(key); i = k & mask; \ + last = i; \ + while (!__ac_isempty(h->flags, i) && (__ac_isdel(h->flags, i) || !__hash_equal(h->keys[i], key))) { \ + i = (i + (++step)) & mask; \ + if (i == last) return h->n_buckets; \ + } \ + return __ac_iseither(h->flags, i)? 
h->n_buckets : i; \ + } else return 0; \ + } \ + SCOPE int kh_resize_##name(kh_##name##_t *h, khint_t new_n_buckets) \ + { /* This function uses 0.25*n_buckets bytes of working space instead of [sizeof(key_t+val_t)+.25]*n_buckets. */ \ + khint32_t *new_flags = 0; \ + khint_t j = 1; \ + { \ + kroundup32(new_n_buckets); \ + if (new_n_buckets < 4) new_n_buckets = 4; \ + if (h->size >= (khint_t)(new_n_buckets * __ac_HASH_UPPER + 0.5)) j = 0; /* requested size is too small */ \ + else { /* hash table size to be changed (shrink or expand); rehash */ \ + new_flags = (khint32_t*)kmalloc(__ac_fsize(new_n_buckets) * sizeof(khint32_t)); \ + if (!new_flags) return -1; \ + memset(new_flags, 0xaa, __ac_fsize(new_n_buckets) * sizeof(khint32_t)); \ + if (h->n_buckets < new_n_buckets) { /* expand */ \ + khkey_t *new_keys = (khkey_t*)krealloc((void *)h->keys, new_n_buckets * sizeof(khkey_t)); \ + if (!new_keys) { kfree(new_flags); return -1; } \ + h->keys = new_keys; \ + if (kh_is_map) { \ + khval_t *new_vals = (khval_t*)krealloc((void *)h->vals, new_n_buckets * sizeof(khval_t)); \ + if (!new_vals) { kfree(new_flags); return -1; } \ + h->vals = new_vals; \ + } \ + } /* otherwise shrink */ \ + } \ + } \ + if (j) { /* rehashing is needed */ \ + for (j = 0; j != h->n_buckets; ++j) { \ + if (__ac_iseither(h->flags, j) == 0) { \ + khkey_t key = h->keys[j]; \ + khval_t val; \ + khint_t new_mask; \ + new_mask = new_n_buckets - 1; \ + if (kh_is_map) val = h->vals[j]; \ + __ac_set_isdel_true(h->flags, j); \ + while (1) { /* kick-out process; sort of like in Cuckoo hashing */ \ + khint_t k, i, step = 0; \ + k = __hash_func(key); \ + i = k & new_mask; \ + while (!__ac_isempty(new_flags, i)) i = (i + (++step)) & new_mask; \ + __ac_set_isempty_false(new_flags, i); \ + if (i < h->n_buckets && __ac_iseither(h->flags, i) == 0) { /* kick out the existing element */ \ + { khkey_t tmp = h->keys[i]; h->keys[i] = key; key = tmp; } \ + if (kh_is_map) { khval_t tmp = h->vals[i]; h->vals[i] = val; val = tmp; } \ + __ac_set_isdel_true(h->flags, i); /* mark it as deleted in the old hash table */ \ + } else { /* write the element and jump out of the loop */ \ + h->keys[i] = key; \ + if (kh_is_map) h->vals[i] = val; \ + break; \ + } \ + } \ + } \ + } \ + if (h->n_buckets > new_n_buckets) { /* shrink the hash table */ \ + h->keys = (khkey_t*)krealloc((void *)h->keys, new_n_buckets * sizeof(khkey_t)); \ + if (kh_is_map) h->vals = (khval_t*)krealloc((void *)h->vals, new_n_buckets * sizeof(khval_t)); \ + } \ + kfree(h->flags); /* free the working space */ \ + h->flags = new_flags; \ + h->n_buckets = new_n_buckets; \ + h->n_occupied = h->size; \ + h->upper_bound = (khint_t)(h->n_buckets * __ac_HASH_UPPER + 0.5); \ + } \ + return 0; \ + } \ + SCOPE khint_t kh_put_##name(kh_##name##_t *h, khkey_t key, int *ret) \ + { \ + khint_t x; \ + if (h->n_occupied >= h->upper_bound) { /* update the hash table */ \ + if (h->n_buckets > (h->size<<1)) { \ + if (kh_resize_##name(h, h->n_buckets - 1) < 0) { /* clear "deleted" elements */ \ + *ret = -1; return h->n_buckets; \ + } \ + } else if (kh_resize_##name(h, h->n_buckets + 1) < 0) { /* expand the hash table */ \ + *ret = -1; return h->n_buckets; \ + } \ + } /* TODO: to implement automatically shrinking; resize() already support shrinking */ \ + { \ + khint_t k, i, site, last, mask = h->n_buckets - 1, step = 0; \ + x = site = h->n_buckets; k = __hash_func(key); i = k & mask; \ + if (__ac_isempty(h->flags, i)) x = i; /* for speed up */ \ + else { \ + last = i; \ + while (!__ac_isempty(h->flags, i) && 
(__ac_isdel(h->flags, i) || !__hash_equal(h->keys[i], key))) { \ + if (__ac_isdel(h->flags, i)) site = i; \ + i = (i + (++step)) & mask; \ + if (i == last) { x = site; break; } \ + } \ + if (x == h->n_buckets) { \ + if (__ac_isempty(h->flags, i) && site != h->n_buckets) x = site; \ + else x = i; \ + } \ + } \ + } \ + if (__ac_isempty(h->flags, x)) { /* not present at all */ \ + h->keys[x] = key; \ + __ac_set_isboth_false(h->flags, x); \ + ++h->size; ++h->n_occupied; \ + *ret = 1; \ + } else if (__ac_isdel(h->flags, x)) { /* deleted */ \ + h->keys[x] = key; \ + __ac_set_isboth_false(h->flags, x); \ + ++h->size; \ + *ret = 2; \ + } else *ret = 0; /* Don't touch h->keys[x] if present and not deleted */ \ + return x; \ + } \ + SCOPE void kh_del_##name(kh_##name##_t *h, khint_t x) \ + { \ + if (x != h->n_buckets && !__ac_iseither(h->flags, x)) { \ + __ac_set_isdel_true(h->flags, x); \ + --h->size; \ + } \ + } + +#define KHASH_DECLARE(name, khkey_t, khval_t) \ + __KHASH_TYPE(name, khkey_t, khval_t) \ + __KHASH_PROTOTYPES(name, khkey_t, khval_t) + +#define KHASH_INIT2(name, SCOPE, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) \ + __KHASH_TYPE(name, khkey_t, khval_t) \ + __KHASH_IMPL(name, SCOPE, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) + +#define KHASH_INIT(name, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) \ + KHASH_INIT2(name, static kh_inline klib_unused, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) + +/* --- BEGIN OF HASH FUNCTIONS --- */ + +/*! @function + @abstract Integer hash function + @param key The integer [khint32_t] + @return The hash value [khint_t] + */ +#define kh_int_hash_func(key) (khint32_t)(key) +/*! @function + @abstract Integer comparison function + */ +#define kh_int_hash_equal(a, b) ((a) == (b)) +/*! @function + @abstract 64-bit integer hash function + @param key The integer [khint64_t] + @return The hash value [khint_t] + */ +#define kh_int64_hash_func(key) (khint32_t)((key)>>33^(key)^(key)<<11) +/*! @function + @abstract 64-bit integer comparison function + */ +#define kh_int64_hash_equal(a, b) ((a) == (b)) +/*! @function + @abstract const char* hash function + @param s Pointer to a null terminated string + @return The hash value + */ +static kh_inline khint_t __ac_X31_hash_string(const char *s) +{ + khint_t h = (khint_t)*s; + if (h) for (++s ; *s; ++s) h = (h << 5) - h + (khint_t)*s; + return h; +} +/*! @function + @abstract Another interface to const char* hash function + @param key Pointer to a null terminated string [const char*] + @return The hash value [khint_t] + */ +#define kh_str_hash_func(key) __ac_X31_hash_string(key) +/*! @function + @abstract Const char* comparison function + */ +#define kh_str_hash_equal(a, b) (strcmp(a, b) == 0) + +static kh_inline khint_t __ac_Wang_hash(khint_t key) +{ + key += ~(key << 15); + key ^= (key >> 10); + key += (key << 3); + key ^= (key >> 6); + key += ~(key << 11); + key ^= (key >> 16); + return key; +} +#define kh_int_hash_func2(k) __ac_Wang_hash((khint_t)key) + +/* --- END OF HASH FUNCTIONS --- */ + +/* Other convenient macros... */ + +/*! + @abstract Type of the hash table. + @param name Name of the hash table [symbol] + */ +#define khash_t(name) kh_##name##_t + +/*! @function + @abstract Initiate a hash table. + @param name Name of the hash table [symbol] + @return Pointer to the hash table [khash_t(name)*] + */ +#define kh_init(name) kh_init_##name() + +/*! @function + @abstract Destroy a hash table. 
+ @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + */ +#define kh_destroy(name, h) kh_destroy_##name(h) + +/*! @function + @abstract Reset a hash table without deallocating memory. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + */ +#define kh_clear(name, h) kh_clear_##name(h) + +/*! @function + @abstract Resize a hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + @param s New size [khint_t] + */ +#define kh_resize(name, h, s) kh_resize_##name(h, s) + +/*! @function + @abstract Insert a key to the hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + @param k Key [type of keys] + @param r Extra return code: -1 if the operation failed; + 0 if the key is present in the hash table; + 1 if the bucket is empty (never used); 2 if the element in + the bucket has been deleted [int*] + @return Iterator to the inserted element [khint_t] + */ +#define kh_put(name, h, k, r) kh_put_##name(h, k, r) + +/*! @function + @abstract Retrieve a key from the hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + @param k Key [type of keys] + @return Iterator to the found element, or kh_end(h) if the element is absent [khint_t] + */ +#define kh_get(name, h, k) kh_get_##name(h, k) + +/*! @function + @abstract Remove a key from the hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + @param k Iterator to the element to be deleted [khint_t] + */ +#define kh_del(name, h, k) kh_del_##name(h, k) + +/*! @function + @abstract Test whether a bucket contains data. + @param h Pointer to the hash table [khash_t(name)*] + @param x Iterator to the bucket [khint_t] + @return 1 if containing data; 0 otherwise [int] + */ +#define kh_exist(h, x) (!__ac_iseither((h)->flags, (x))) + +/*! @function + @abstract Get key given an iterator + @param h Pointer to the hash table [khash_t(name)*] + @param x Iterator to the bucket [khint_t] + @return Key [type of keys] + */ +#define kh_key(h, x) ((h)->keys[x]) + +/*! @function + @abstract Get value given an iterator + @param h Pointer to the hash table [khash_t(name)*] + @param x Iterator to the bucket [khint_t] + @return Value [type of values] + @discussion For hash sets, calling this results in segfault. + */ +#define kh_val(h, x) ((h)->vals[x]) + +/*! @function + @abstract Alias of kh_val() + */ +#define kh_value(h, x) ((h)->vals[x]) + +/*! @function + @abstract Get the start iterator + @param h Pointer to the hash table [khash_t(name)*] + @return The start iterator [khint_t] + */ +#define kh_begin(h) (khint_t)(0) + +/*! @function + @abstract Get the end iterator + @param h Pointer to the hash table [khash_t(name)*] + @return The end iterator [khint_t] + */ +#define kh_end(h) ((h)->n_buckets) + +/*! @function + @abstract Get the number of elements in the hash table + @param h Pointer to the hash table [khash_t(name)*] + @return Number of elements in the hash table [khint_t] + */ +#define kh_size(h) ((h)->size) + +/*! @function + @abstract Get the number of buckets in the hash table + @param h Pointer to the hash table [khash_t(name)*] + @return Number of buckets in the hash table [khint_t] + */ +#define kh_n_buckets(h) ((h)->n_buckets) + +/*! 
@function + @abstract Iterate over the entries in the hash table + @param h Pointer to the hash table [khash_t(name)*] + @param kvar Variable to which key will be assigned + @param vvar Variable to which value will be assigned + @param code Block of code to execute + */ +#define kh_foreach(h, kvar, vvar, code) { khint_t __i; \ + for (__i = kh_begin(h); __i != kh_end(h); ++__i) { \ + if (!kh_exist(h,__i)) continue; \ + (kvar) = kh_key(h,__i); \ + (vvar) = kh_val(h,__i); \ + code; \ + } } + +/*! @function + @abstract Iterate over the values in the hash table + @param h Pointer to the hash table [khash_t(name)*] + @param vvar Variable to which value will be assigned + @param code Block of code to execute + */ +#define kh_foreach_value(h, vvar, code) { khint_t __i; \ + for (__i = kh_begin(h); __i != kh_end(h); ++__i) { \ + if (!kh_exist(h,__i)) continue; \ + (vvar) = kh_val(h,__i); \ + code; \ + } } + +/* More conenient interfaces */ + +/*! @function + @abstract Instantiate a hash set containing integer keys + @param name Name of the hash table [symbol] + */ +#define KHASH_SET_INIT_INT(name) \ + KHASH_INIT(name, khint32_t, char, 0, kh_int_hash_func, kh_int_hash_equal) + +/*! @function + @abstract Instantiate a hash map containing integer keys + @param name Name of the hash table [symbol] + @param khval_t Type of values [type] + */ +#define KHASH_MAP_INIT_INT(name, khval_t) \ + KHASH_INIT(name, khint32_t, khval_t, 1, kh_int_hash_func, kh_int_hash_equal) + +/*! @function + @abstract Instantiate a hash map containing 64-bit integer keys + @param name Name of the hash table [symbol] + */ +#define KHASH_SET_INIT_INT64(name) \ + KHASH_INIT(name, khint64_t, char, 0, kh_int64_hash_func, kh_int64_hash_equal) + +/*! @function + @abstract Instantiate a hash map containing 64-bit integer keys + @param name Name of the hash table [symbol] + @param khval_t Type of values [type] + */ +#define KHASH_MAP_INIT_INT64(name, khval_t) \ + KHASH_INIT(name, khint64_t, khval_t, 1, kh_int64_hash_func, kh_int64_hash_equal) + +typedef const char *kh_cstr_t; +/*! @function + @abstract Instantiate a hash map containing const char* keys + @param name Name of the hash table [symbol] + */ +#define KHASH_SET_INIT_STR(name) \ + KHASH_INIT(name, kh_cstr_t, char, 0, kh_str_hash_func, kh_str_hash_equal) + +/*! @function + @abstract Instantiate a hash map containing const char* keys + @param name Name of the hash table [symbol] + @param khval_t Type of values [type] + */ +#define KHASH_MAP_INIT_STR(name, khval_t) \ + KHASH_INIT(name, kh_cstr_t, khval_t, 1, kh_str_hash_func, kh_str_hash_equal) + +#endif /* __AC_KHASH_H */ diff --git a/include/seiscomp/broker/utils/utils.h b/include/seiscomp/broker/utils/utils.h new file mode 100644 index 0000000..201dccd --- /dev/null +++ b/include/seiscomp/broker/utils/utils.h @@ -0,0 +1,442 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * Author: Jan Becker * + * Email: jabe@gempa.de * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. 
* + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef GEMPA_MESSAGESERVER_UTILS_H__ +#define GEMPA_MESSAGESERVER_UTILS_H__ + + +#include +#include + +#include +#include +#include +#include +#include +#include +#include + + +namespace Seiscomp { +namespace Utils { + + +/** + * @brief The Randomizer class generated random data of arbitrary length. + * + * This class utilized /dev/urandom under Unix. Other operating systems are + * not yet supported. Randomizer is implemented as a singleton. The usage + * is as simple as: + * + * \code + * if ( !Randomizer::Instance().fillData(data, len) ) + * cerr << "Failed to generate random data" << endl; + * \endcode + * + * A helper template method Randomizer::fill is provided which takes an + * argument of arbitrary type and fills it with random data. + * + * \code + * int id; + * if ( !Randomizer::Instance().fill(id) ) + * cerr << "Failed to generate id" << endl; + * \endcode + */ +class SC_BROKER_API Randomizer { + // ---------------------------------------------------------------------- + // Destruction + // ---------------------------------------------------------------------- + public: + //! D'tor + ~Randomizer(); + + + // ---------------------------------------------------------------------- + // Public interface + // ---------------------------------------------------------------------- + public: + /** + * @brief Returns the singleton instance. + * @return The singleton instance + */ + static Randomizer &Instance() { return _instance; } + + /** + * @brief Fills a value with random data. + * @param target The value to be filled. + * @return true on success, false otherwise + */ + template + bool fill(T &target); + + /** + * @brief Fills a block of data with random data + * @param data The pointer to the memory block + * @param len The length in bytes of the memory block + * @return true on success, false otherwise + */ + bool fillData(void *data, size_t len); + + + // ---------------------------------------------------------------------- + // Private interface + // ---------------------------------------------------------------------- + private: + //! 
Private constructor + Randomizer(); + + + // ---------------------------------------------------------------------- + // Private members + // ---------------------------------------------------------------------- + private: + static Randomizer _instance; + FILE *_randomFd; +}; + + +template +bool Randomizer::fill(T &target) { + return fillData(&target, sizeof(target)); +} + + +template +class BlockingDequeue : private boost::noncopyable { + // ---------------------------------------------------------------------- + // Public types + // ---------------------------------------------------------------------- + public: + typedef std::unique_lock lock; + + // ---------------------------------------------------------------------- + // X'truction + // ---------------------------------------------------------------------- + public: + BlockingDequeue(); + BlockingDequeue(int n); + ~BlockingDequeue(); + + + // ---------------------------------------------------------------------- + // Blocking interface + // ---------------------------------------------------------------------- + public: + void resize(int n); + + bool canPush() const; + bool push(T v); + + bool canPop() const; + T pop(); + + bool pop(T &); + + void close(); + void reopen(); + + size_t size() const; + + void lockBuffer(); + void unlockBuffer(); + + //! Requires lockBuffer to be called + size_t buffered() const; + + //! Requires lockBuffer to be called + T &operator[](size_t idx); + + + // ---------------------------------------------------------------------- + // Private members + // ---------------------------------------------------------------------- + private: + volatile int _begin, _end; + volatile size_t _buffered; + volatile bool _closed; + std::vector _buffer; + std::condition_variable _notFull, _notEmpty; + mutable std::mutex _monitor; +}; + + + +template +struct BlockingDequeueHelper {}; + +template +struct BlockingDequeueHelper { + static void clean(const std::vector &) {} + static T defaultValue() { return T(); } +}; + +template +struct BlockingDequeueHelper { + static void clean(const std::vector &b) { + for ( size_t i = 0; i < b.size(); ++i ) { + if ( b[i] ) delete b[i]; + } + } + + static T defaultValue() { return NULL; } +}; +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +BlockingDequeue::BlockingDequeue() : + _begin(0), _end(0), + _buffered(0), _closed(false), _buffer(0) +{} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +BlockingDequeue::BlockingDequeue(int n) : + _begin(0), _end(0), + _buffered(0), _closed(false), _buffer(n) +{} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +BlockingDequeue::~BlockingDequeue() { + close(); + BlockingDequeueHelper::value>::clean(_buffer); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +void BlockingDequeue::resize(int n) { + lock lk(_monitor); + _buffer.resize(n); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +bool BlockingDequeue::canPush() 
const { + lock lk(_monitor); + + if ( _closed ) + throw Core::GeneralException("Queue has been closed"); + + return _buffered < _buffer.size(); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +bool BlockingDequeue::push(T v) { + lock lk(_monitor); + while (_buffered == _buffer.size() && !_closed) + _notFull.wait(lk); + if ( _closed ) { + _notEmpty.notify_all(); + return false; + } + _buffer[_end] = v; + _end = (_end+1) % _buffer.size(); + ++_buffered; + _notEmpty.notify_all(); + return true; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +bool BlockingDequeue::canPop() const { + lock lk(_monitor); + + if ( _closed ) + throw Core::GeneralException("Queue has been closed"); + + return _buffered > 0; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +T BlockingDequeue::pop() { + lock lk(_monitor); + while (_buffered == 0 && !_closed) { + _notEmpty.wait(lk); + } + if ( _closed ) + throw Core::GeneralException("Queue has been closed"); + T v = _buffer[_begin]; + _buffer[_begin] = BlockingDequeueHelper::value>::defaultValue(); + _begin = (_begin+1) % _buffer.size(); + --_buffered; + _notFull.notify_all(); + return v; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +bool BlockingDequeue::pop(T &v) { + lock lk(_monitor); + + if ( _closed ) + throw Core::GeneralException("Queue has been closed"); + + if ( _buffered > 0 ) { + v = _buffer[_begin]; + _buffer[_begin] = BlockingDequeueHelper::value>::defaultValue(); + _begin = (_begin+1) % _buffer.size(); + --_buffered; + _notFull.notify_all(); + return true; + } + else + return false; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +void BlockingDequeue::close() { + lock lk(_monitor); + if ( _closed ) return; + _closed = true; + _notFull.notify_all(); + _notEmpty.notify_all(); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +void BlockingDequeue::reopen() { + lock lk(_monitor); + _closed = false; + if ( !_buffered ) + _notFull.notify_all(); + else + _notEmpty.notify_all(); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +size_t BlockingDequeue::size() const { + lock lk(_monitor); + return _buffered; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +void BlockingDequeue::lockBuffer() { + _monitor.lock(); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +void BlockingDequeue::unlockBuffer() { + _monitor.unlock(); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + 
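+/*
+ * Usage sketch (illustrative only, not a definitive reference): a bounded
+ * producer/consumer exchange built on BlockingDequeue. It assumes the class
+ * is parameterised on its element type (BlockingDequeue<int> below) and that
+ * <thread> is available to the caller; names such as `producer` are
+ * hypothetical.
+ *
+ * \code
+ * Seiscomp::Utils::BlockingDequeue<int> queue(16);   // room for 16 items
+ *
+ * std::thread producer([&queue]() {
+ *     for ( int i = 0; i < 100; ++i )
+ *         queue.push(i);          // blocks while the queue is full
+ *     queue.close();              // wakes up a consumer blocked in pop()
+ * });
+ *
+ * try {
+ *     while ( true ) {
+ *         int v = queue.pop();    // blocks while the queue is empty
+ *         // ... process v ...
+ *     }
+ * }
+ * catch ( Seiscomp::Core::GeneralException & ) {
+ *     // thrown once the queue has been closed
+ * }
+ *
+ * producer.join();
+ * \endcode
+ */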
+ + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +size_t BlockingDequeue::buffered() const { + return _buffered; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +T &BlockingDequeue::operator[](size_t idx) { + idx += _begin; + if ( idx >= _buffer.size() ) + idx -= _buffer.size(); + + return _buffer[idx]; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +} +} + + +#endif diff --git a/include/seiscomp/client.h b/include/seiscomp/client.h new file mode 100644 index 0000000..a8dfc8f --- /dev/null +++ b/include/seiscomp/client.h @@ -0,0 +1,17 @@ +#ifndef SC_SYSTEM_CLIENT_API_H +#define SC_SYSTEM_CLIENT_API_H + +#if defined(WIN32) && (defined(SC_SYSTEM_CLIENT_SHARED) || defined(SC_ALL_SHARED)) +# if defined(SC_SYSTEM_CLIENT_EXPORTS) +# define SC_SYSTEM_CLIENT_API __declspec(dllexport) +# define SC_SYSTEM_CLIENT_TEMPLATE_EXPORT +# else +# define SC_SYSTEM_CLIENT_API __declspec(dllimport) +# define SC_SYSTEM_CLIENT_TEMPLATE_EXPORT extern +# endif +#else +# define SC_SYSTEM_CLIENT_API +# define SC_SYSTEM_CLIENT_TEMPLATE_EXPORT +#endif + +#endif diff --git a/include/seiscomp/client/application.h b/include/seiscomp/client/application.h new file mode 100644 index 0000000..39633c6 --- /dev/null +++ b/include/seiscomp/client/application.h @@ -0,0 +1,762 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. 
* + ***************************************************************************/ + + +#ifndef SEISCOMP_CLIENT_APPLICATION_H +#define SEISCOMP_CLIENT_APPLICATION_H + + +#include + +#include + +#include +#include +#include +#include + +#include +#include +#include + +#include + +#include + +#include +#include + +#include +#include +#include + + +#define SCCoreApp (Seiscomp::Client::Application::Instance()) + + +namespace Seiscomp { + +namespace Logging { + class Output; +} + +namespace Client { + + +MAKEENUM( + ApplicationStatus, + EVALUES( + STARTED, + FINISHED + ), + ENAMES( + "started", + "finished" + ) +); + + +class SC_SYSTEM_CLIENT_API ApplicationStatusMessage : public Core::Message { + DECLARE_SC_CLASS(ApplicationStatusMessage); + DECLARE_SERIALIZATION; + + public: + ApplicationStatusMessage(); + ApplicationStatusMessage(const std::string &module, + ApplicationStatus status); + + ApplicationStatusMessage(const std::string &module, + const std::string &username, + ApplicationStatus status); + + + public: + virtual bool empty() const; + + const std::string &module() const; + const std::string &username() const; + ApplicationStatus status() const; + + + private: + std::string _module; + std::string _username; + ApplicationStatus _status; +}; + + +struct SC_SYSTEM_CLIENT_API Notification { + //! Declares the application internal notification types. + //! Custom types can be used with negative values. + enum Type { + Object, + Disconnect, + Reconnect, + Close, + Timeout, + AcquisitionFinished + }; + + Notification() : object(nullptr), type(Object) {} + Notification(Core::BaseObject * o) : object(o), type(Object) {} + Notification(int t) : object(nullptr), type(t) {} + Notification(int t, Core::BaseObject * o) : object(o), type(t) {} + + Core::BaseObject *object; + int type; +}; + + +/** + * \brief Application class to write commandline clients easily which are + * connected to the messaging and need database access. + * + * In addition to @ref System::Application it adds the method + * @ref handleMessage which must be implemented to handle receives + * messages. An additional abstraction layer is implemented which already + * checks the message for notifier objects, extracts them and calls respective + * callbacks: + * * @ref addObject() + * * @ref removeObject() + * * @ref updateObject() + */ +class SC_SYSTEM_CLIENT_API Application : public System::Application { + // ---------------------------------------------------------------------- + // Public types + // ---------------------------------------------------------------------- + public: + typedef ObjectMonitor::Log ObjectLog; + + //! Initialization stages used when reporting errors + enum ClientStage { + MESSAGING = System::Application::ST_QUANTITY, + DATABASE = System::Application::ST_QUANTITY + 1, + CST_QUANTITY + }; + + + // ---------------------------------------------------------------------- + // X'truction + // ---------------------------------------------------------------------- + public: + Application(int argc, char **argv); + ~Application(); + + + // ---------------------------------------------------------------------- + // Public functions + // ---------------------------------------------------------------------- + public: + //! Returns the configured agencyID + const std::string &agencyID() const; + + //! 
Returns the configured author + const std::string &author() const; + + /** + * Returns according to the configured white- and blacklist of + * agencyID's whether the passed agencyID is allowed or not + * @param agencyID The agencyID to check + * @return The boolean result + */ + bool isAgencyIDAllowed(const std::string &agencyID) const; + + /** + * Returns !isAgencyIDAllowed(agencyID) + * @param agencyID The agencyID to check + * @return !isAgencyIDAllowed(agencyID) + */ + bool isAgencyIDBlocked(const std::string &agencyID) const; + + /** + * Exit the application and set the returnCode. + * @param returnCode The value returned from exec() + */ + virtual void exit(int returnCode); + + //! Returns the application's messaging connection interface + Client::Connection *connection() const; + + //! Returns the configured database type + const std::string &databaseType() const; + + //! Returns the configured database connection parameters + const std::string &databaseParameters() const; + + //! Returns the application's database interface + IO::DatabaseInterface *database() const; + + //! Returns the application's database URI + const std::string &databaseURI() const; + + //! Returns the application's database query interface + DataModel::DatabaseQuery *query() const; + + //! Returns the configures recordstream URL to be used by + //! RecordStream::Open() + const std::string &recordStreamURL() const; + + //! Returns the list of configured points of interest + const std::vector &cities() const; + + //! Returns the nearest city with respect to lat/lon and + //! a given maximum distance and minimum population + const Math::Geo::CityD *nearestCity(double lat, double lon, + double maxDist, double minPopulation, + double *dist, double *azi) const; + + //! Returns the config module object if available + DataModel::ConfigModule *configModule() const; + + //! Returns the state of a station + bool isStationEnabled(const std::string& networkCode, + const std::string& stationCode); + + //! Returns the messaging-server + const std::string &messagingURL() const; + + + //! Returns the filename of the OpenSSL certificate or + //! the certificate data Base64 encoded. The Base64 encoded + //! data starts with the special DataTag. + //! If no certificate is used the method returns an empty + //! string. + const std::string &messagingCertificate() const; + + //! Enables a timer that calls every n seconds the + //! handleTimeout() methods + //! A value of 0 seconds disables the timer + void enableTimer(unsigned int seconds); + + //! Disables the timer + void disableTimer(); + + //! Sends a notification to the application. If used in derived + //! classes to send custom notifications use negative notification + //! types and reimplement dispatchNotification(...). + void sendNotification(const Notification &); + + bool waitEvent(); + + + // ---------------------------------------------------------------------- + // Initialization configuration methods + // These methods have to be called before the init() method. + // ---------------------------------------------------------------------- + public: + //! Sets the primary messaging group + void setPrimaryMessagingGroup(const std::string&); + + //! Returns the set primary messaging group + const std::string &primaryMessagingGroup() const; + + //! Sets the username used for the messaging connection + void setMessagingUsername(const std::string&); + + /** + * Adds a group to subscribe to. This is only a default group. 
+ * If another group or groups are given via commandline or config + * file this subscription will be overriden completely. + */ + void addMessagingSubscription(const std::string&); + + //! Initialize the database, default = true, true + void setDatabaseEnabled(bool enable, bool tryToFetch); + bool isDatabaseEnabled() const; + + //! Returns whether the inventory should be loaded from a + //! file (false) or from the database (true) + bool isInventoryDatabaseEnabled() const; + + //! Returns whether the config module should be loaded from a + //! file (false) or from the database (true) + bool isConfigDatabaseEnabled() const; + + //! Initialize the messaging, default = true + void setMessagingEnabled(bool enable); + bool isMessagingEnabled() const; + + /** + * @brief Toggles receiption of messaging membership messages. The + * default is false. + * @param enable Flag + */ + void setMembershipMessagesEnabled(bool enable); + bool areMembershipMessagesEnabled() const; + + //! Enables/disables sending of start/stop messages. + //! If enabled, a start message (at startup) and a + //! stop message (at shutdown) will be sent to the + //! STATUS group. Default = false + void setStartStopMessagesEnabled(bool enable); + bool areStartStopMessagesEnabled() const; + + //! Enables/disables auto shutdown caused by + //! the shutdown of a definable master module or + //! master username. If both values are set the + //! one coming first is used. + void setAutoShutdownEnabled(bool enable); + bool isAutoShutdownEnabled() const; + + //! Enables recordstream URL option, default = true + void setRecordStreamEnabled(bool enable); + bool isRecordStreamEnabled() const; + + //! Load the stations from the inventory at startup, default = false + void setLoadStationsEnabled(bool enable); + bool isLoadStationsEnabled() const; + + //! Load the complete inventory at startup, default = false + void setLoadInventoryEnabled(bool enable); + bool isLoadInventoryEnabled() const; + + //! Load the configmodule from the database at startup, default = false + void setLoadConfigModuleEnabled(bool enable); + bool isLoadConfigModuleEnabled() const; + + //! Load the cities.xml file, default = false + void setLoadCitiesEnabled(bool enable); + bool isLoadCitiesEnabled() const; + + //! Load the custom defined fep regions in ~/.seiscomp/fep or + //! ~/seiscomp/trunk/share/fep, default = false + void setLoadRegionsEnabled(bool enable); + bool isLoadRegionsEnabled() const; + + //! Sets whether the received notifier are applied automatically + //! or not, default: true + + /** + * Sets whether the received notifier are applied automatically + * or not, default: true + * When AutoApplyNotifier is enabled a received message will + * be handled in two passes: + * 1. pass: Apply all attached notifier + * 2. pass: Interpret all notifier + * + * So when using an object in an interprete callback it is + * garantueed that all child objects that also has been sent + * inside the message are attached to it. + */ + void setAutoApplyNotifierEnabled(bool enable); + bool isAutoApplyNotifierEnabled() const; + + /** + * Sets whether the received notifier will be interpreted or not. + * Default: true + * When this option is enabled, the callback methods + * addObject(), updateObject() and removeObject() will be + * called after a notifier has been received. 
+ */ + void setInterpretNotifierEnabled(bool enable); + bool isInterpretNotifierEnabled() const; + + /** Returns whether a custom publicID pattern has been configured + or not */ + bool hasCustomPublicIDPattern() const; + + /** + * Sets the number of retries if a connection fails. + * The default value is 0xFFFFFFFF and should be understood + * as "keep on trying". + */ + void setConnectionRetries(unsigned int); + + //! Sets the config module name to use when reading + //! the database configuration. An empty module name + //! means: read all available modules. + //! The default module is "trunk". + void setConfigModuleName(const std::string &module); + const std::string &configModuleName() const; + + //! Sets the master module used when auto shutdown + //! is activated. + void setShutdownMasterModule(const std::string &module); + + //! Sets the master username used when auto shutdown + //! is activated. + void setShutdownMasterUsername(const std::string &username); + + + // ---------------------------------------------------------------------- + // Public methods + // These methods have to be called after the init() method. + // ---------------------------------------------------------------------- + public: + /** + * Adds a logger for an input object flow. + * This method must be called after Application::init(). + * The returned pointer is managed by the Application and must not + * be deleted. + */ + ObjectLog * + addInputObjectLog(const std::string &name, + const std::string &channel = ""); + + /** + * Adds a logger for an output object flow. + * This method must be called after Application::init(). + * The returned pointer is managed by the Application and must not + * be deleted. + */ + ObjectLog * + addOutputObjectLog(const std::string &name, + const std::string &channel = ""); + + /** + * Logs input/output object throughput. + * @param log Pointer returned by addInputObjectLog or addOutputObjectLog + * @param timestamp The timestamp to be logged + */ + void logObject(ObjectLog *log, const Core::Time ×tamp, + size_t count = 1); + + /** + * Reloads the application inventory from either an XML file or + * the database. + */ + bool reloadInventory(); + + /** + * Reloads the application configuration (bindings) from either an + * XML file or the database. + */ + bool reloadBindings(); + + /** + * @brief Injects a message from outside. The message will actually + * take the same path as when it would have been received via + * the messaging. + * @param msg The message. The ownership if left to the caller. + * @param pkt The optional network packet. The ownership is left to + * the caller. + */ + void injectMessage(Core::Message *msg, Packet *pkt = nullptr); + + /** + * @brief Routes a notifier to either add/update or removeObject. + * @param notifier The notifier pointer which must not be nullptr + */ + void handleNotifier(DataModel::Notifier *notifier); + + + // ---------------------------------------------------------------------- + // Static public members + // ---------------------------------------------------------------------- + public: + //! Returns the pointer to the application's instance. 
+ static Application *Instance(); + + + // ---------------------------------------------------------------------- + // Protected functions + // ---------------------------------------------------------------------- + protected: + virtual bool validateParameters() override; + virtual bool handlePreFork() override; + + virtual bool init() override; + + /** + * Starts the mainloop until exit() or quit() is called. + * The default implementation waits for messages in blocking mode + * and calls handleMessage() whenever a new message arrives. + */ + virtual bool run() override; + + //! This method gets called when all messages has been read or + //! the connection is invalid + virtual void idle(); + + //! Cleanup method called before exec() returns. + virtual void done() override; + + //! Opens the configuration file and reads the state variables + virtual bool initConfiguration() override; + + //! Initialized the database + virtual bool initDatabase(); + + //! Sets the database interface and creates a database query object + void setDatabase(IO::DatabaseInterface* db); + + /** + * Reads the requested subscriptions from the configuration file + * and apply them to the messaging connection. + */ + virtual bool initSubscriptions(); + + const std::set &subscribedGroups() const; + + /** + * Called when the application received the AcquisitionFinished event. + * This is most likely send from the readRecords thread of the + * StreamApplication. The default implementation does nothing. + */ + virtual void handleEndAcquisition(); + + + // ---------------------------------------------------------------------- + // Messaging handlers + // ---------------------------------------------------------------------- + protected: + virtual bool dispatch(Core::BaseObject*); + + //! Custom dispatch method for notifications with negative (< 0) + //! types. The default implementation return false. + virtual bool dispatchNotification(int type, Core::BaseObject*); + + /** + * Reads messages from the connection. + * @return true, if successfull, false if not. When returning false, + * the mainloop will stop and the program is going to + * terminate. + */ + bool readMessages(); + + /** + * This method gets called when a previously started timer timeout's. + * The timer has to be started by enableTimer(timeout). + */ + virtual void handleTimeout(); + + /** + * This method is called when close event is sent to the application. + * The default handler returns true and causes the event queue to + * shutdown and to exit the application. + * It false is returned the close event is ignored. + */ + virtual bool handleClose(); + + /** + * This methods gets called when an auto shutdown has been + * initiated. The default implementation just quits. + */ + virtual void handleAutoShutdown(); + + /** + * This methods gets called when an the log interval is reached + * and the application should prepare its logging information. This + * method can be used to sync logs. + * The default implementation does nothing. + */ + virtual void handleMonitorLog(const Core::Time ×tamp); + + /** + * This method gets called after the connection got lost. + */ + virtual void handleDisconnect(); + + /** + * This method gets called after the connection got reestablished. + */ + virtual void handleReconnect(); + + /** + * Handles receiption of a network packet which is a candidate + * for message decoding. Special service messages such as ENTER or + * LEAVE will not cause a message to be created. 
This method is always + * called *before* a message should be handled. + */ + virtual void handleNetworkMessage(const Client::Packet *msg); + + /** + * This method gets called whenever a new message arrives. Derived + * classes have to implement this method to receive messages. + * To enable autoapplying and notifier interpreting call this method + * inside the reimplemented version. + * @param msg The message. A smartpointer may be stored for + * future use. The pointer must not be deleted! + */ + virtual void handleMessage(Core::Message *msg); + + //! Callback for interpret notifier + virtual void addObject(const std::string &parentID, DataModel::Object*) {} + + //! Callback for interpret notifier + virtual void removeObject(const std::string &parentID, DataModel::Object*) {} + + //! Callback for interpret notifier + virtual void updateObject(const std::string &parentID, DataModel::Object*) {} + + + // ---------------------------------------------------------------------- + // Private functions + // ---------------------------------------------------------------------- + private: + bool initMessaging(); + + bool loadConfig(const std::string &configDB); + bool loadInventory(const std::string &inventoryDB); + + void startMessageThread(); + void runMessageThread(); + + bool processEvent(); + + void timeout(); + + void monitorLog(const Core::Time ×tamp, std::ostream &os); + + + // ---------------------------------------------------------------------- + // Implementation + // ---------------------------------------------------------------------- + protected: + DataModel::DatabaseQueryPtr _query; + DataModel::ConfigModulePtr _configModule; + + std::vector _cities; + std::set _messagingSubscriptions; + + + private: + static Application *_instance; + + + protected: + using StringVector = std::vector; + + struct AppSettings : AbstractSettings { + int objectLogTimeWindow{60}; + + std::string agencyID{"UNSET"}; + std::string author{"@appname@@@@hostname@"}; + + bool enableLoadRegions{false}; + std::string customPublicIDPattern; + std::string configModuleName{"trunk"}; + + bool enableFetchDatabase{true}; + bool enableLoadStations{false}; + bool enableLoadInventory{false}; + bool enableLoadConfigModule{false}; + bool enableAutoApplyNotifier{true}; + bool enableInterpretNotifier{true}; + + unsigned int retryCount{0xFFFFFFFF}; + + Util::StringFirewall networkTypeFirewall; + Util::StringFirewall stationTypeFirewall; + + struct Database { + void accept(SettingsLinker &linker); + + bool enable{true}; + bool showDrivers{false}; + + std::string type; + std::string parameters; + std::string URI; + + std::string inventoryDB; + std::string configDB; + } database; + + struct Inventory { + void accept(SettingsLinker &linker); + + StringVector netTypeWhitelist; + StringVector netTypeBlacklist; + StringVector staTypeWhitelist; + StringVector staTypeBlacklist; + } inventory; + + // Messaging + struct Messaging { + void accept(SettingsLinker &linker); + + bool enable{true}; + bool membershipMessages{false}; + + std::string user; + std::string URL{"localhost/production"}; + std::string primaryGroup{Protocol::LISTENER_GROUP}; + std::string contentType; + unsigned int timeout{3}; + std::string certificate; + + StringVector subscriptions; + + } messaging; + + struct Client { + void accept(SettingsLinker &linker); + + bool startStopMessages{false}; + bool autoShutdown{false}; + std::string shutdownMasterModule; + std::string shutdownMasterUsername; + } client; + + struct RecordStream { + void accept(SettingsLinker 
&linker); + + bool enable{false}; + bool showDrivers{false}; + + std::string URI; + std::string file; + std::string fileType; + } recordstream; + + struct Processing { + void accept(SettingsLinker &linker); + + StringVector agencyWhitelist; + StringVector agencyBlacklist; + Util::StringFirewall firewall; + } processing; + + struct Cities { + void accept(SettingsLinker &linker); + + bool enable{false}; + std::string db; + } cities; + + void accept(SettingsLinker &linker) override; + }; + + AppSettings _settings; + + ObjectMonitor *_inputMonitor; + ObjectMonitor *_outputMonitor; + + ThreadedQueue _queue; + std::thread *_messageThread; + + ConnectionPtr _connection; + IO::DatabaseInterfacePtr _database; + Util::Timer _userTimer; + + std::mutex _objectLogMutex; +}; + + +inline bool Application::waitEvent() { + return processEvent(); +} + + +} +} + + +#endif diff --git a/include/seiscomp/client/configdb.h b/include/seiscomp/client/configdb.h new file mode 100644 index 0000000..8d6ba8a --- /dev/null +++ b/include/seiscomp/client/configdb.h @@ -0,0 +1,81 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef SEISCOMP_CLIENT_CONFIG_H +#define SEISCOMP_CLIENT_CONFIG_H + + +#include +#include +#include +#include +#include +#include + +#include +#include + + +namespace Seiscomp { +namespace Client { + + +class SC_SYSTEM_CLIENT_API ConfigDB { + private: + ConfigDB(); + + public: + static ConfigDB* Instance(); + static void Reset(); + + void load(DataModel::DatabaseReader* reader, + const OPT(std::string)& moduleName = Seiscomp::Core::None, + const OPT(std::string)& networkCode = Seiscomp::Core::None, + const OPT(std::string)& stationCode = Seiscomp::Core::None, + const OPT(std::string)& setupName = Seiscomp::Core::None, + const std::set& parameterNames = std::set()); + + void load(const char *xml); + + DataModel::Config *config(); + + private: + std::map _configModules; + std::map _configStations; + std::map _parameterSets; + DataModel::ConfigPtr _config; + static ConfigDB *_instance; + + DataModel::DatabaseIterator getConfigObjects(DataModel::DatabaseReader* reader, + const Core::RTTI& classType, + const OPT(std::string)& moduleName, + const OPT(std::string)& networkCode, + const OPT(std::string)& stationCode, + const OPT(std::string)& setupName, + const std::set& parameterNames); +}; + + +} +} + + +#endif + diff --git a/include/seiscomp/client/inventory.h b/include/seiscomp/client/inventory.h new file mode 100644 index 0000000..7e076e1 --- /dev/null +++ b/include/seiscomp/client/inventory.h @@ -0,0 +1,156 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. 
* + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef SEISCOMP_CLIENT_INVENTORY_H +#define SEISCOMP_CLIENT_INVENTORY_H + + +#include +#include +#include +#include +#include +#include + +#include +#include + + +namespace Seiscomp { +namespace Client { + + +struct SC_SYSTEM_CLIENT_API StationLocation { + StationLocation(); + StationLocation(double lat, double lon, double elevation); + + double latitude; + double longitude; + double elevation; +}; + + +typedef std::vector StationList; + +class SC_SYSTEM_CLIENT_API Inventory { + // ---------------------------------------------------------------------- + // X'truction + // ---------------------------------------------------------------------- + private: + //! Private c'tor. This class implements the singleton pattern and + //! can be accessed through the static Instance() method. + Inventory(); + + + // ---------------------------------------------------------------------- + // Public interface + // ---------------------------------------------------------------------- + public: + static Inventory* Instance(); + static void Reset(); + + void load(const char *filename); + void load(DataModel::DatabaseReader*); + void setInventory(DataModel::Inventory*); + + int filter(const Util::StringFirewall *networkTypeFW, + const Util::StringFirewall *stationTypeFW); + + void loadStations(DataModel::DatabaseReader*); + + //! Returns the station location for a network- and stationcode and + //! a time. If the station has not been found a ValueException will + //! be thrown. + StationLocation stationLocation(const std::string& networkCode, + const std::string& stationCode, + const Core::Time&) const; + + //! Returns the station for a network- and stationcode and + //! a time. If the station has not been found nullptr will be returned. + DataModel::Station* getStation(const std::string &networkCode, + const std::string &stationCode, + const Core::Time &, + DataModel::InventoryError *error = nullptr) const; + + //! Returns the sensorlocation for a network-, station- and locationcode and + //! a time. If the sensorlocation has not been found nullptr will be returned. + DataModel::SensorLocation* getSensorLocation(const std::string &networkCode, + const std::string &stationCode, + const std::string &locationCode, + const Core::Time &, + DataModel::InventoryError *error = nullptr) const; + + //! Returns the stream for a network-, station-, location- and channelcode and + //! a time. If the stream has not been found nullptr will be returned. + DataModel::Stream* getStream(const std::string &networkCode, + const std::string &stationCode, + const std::string &locationCode, + const std::string &channelCode, + const Core::Time &, + DataModel::InventoryError *error = nullptr) const; + + //! 
Returns the three streams (vertical, horizontal1, horizontal2) corresponding + //! to the given network-, station-, location- and channel code + DataModel::ThreeComponents getThreeComponents(const std::string& networkCode, + const std::string& stationCode, + const std::string& locationCode, + const std::string& channelCode, + const Core::Time&) const; + + //! Returns the station used for a pick. If the station has not been found + //! nullptr will be returned. + DataModel::Station* getStation(const DataModel::Pick*) const; + + //! Returns the sensor location used for a pick. If the sensor location has + //! not been found nullptr will be returned. + DataModel::SensorLocation* getSensorLocation(const DataModel::Pick*) const; + + DataModel::Stream* getStream(const DataModel::Pick*) const; + + //! Returns the three streams (vertical, horizontal1, horizontal2) corresponding + //! to the picked stream. + DataModel::ThreeComponents getThreeComponents(const DataModel::Pick*) const; + + double getGain(const std::string& networkCode, + const std::string& stationCode, + const std::string& locationCode, + const std::string& channelCode, + const Core::Time&); + + //! Returns all defined stations for the given time + int getAllStations(StationList&, const Core::Time&); + + DataModel::Inventory* inventory(); + + + // ---------------------------------------------------------------------- + // Private members + // ---------------------------------------------------------------------- + private: + DataModel::InventoryPtr _inventory; + static Inventory _instance; +}; + + +} +} + + +#endif diff --git a/include/seiscomp/client/monitor.h b/include/seiscomp/client/monitor.h new file mode 100644 index 0000000..e34a36b --- /dev/null +++ b/include/seiscomp/client/monitor.h @@ -0,0 +1,114 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef SEISCOMP_UTILS_MONITOR_H +#define SEISCOMP_UTILS_MONITOR_H + +#include +#include +#include +#include +#include + + +namespace Seiscomp { +namespace Client { + + +//! A running average calculator that logs the number of +//! objects/thingies in a certain interval. The accuracy is +//! a second. +class SC_SYSTEM_CLIENT_API RunningAverage { + public: + RunningAverage(int timeSpanInSeconds); + + + public: + int timeSpan() const { return _timeSpan; } + + void push(const Core::Time &time, size_t count = 1); + + //! Returns the current count per time span. + int count(const Core::Time &time) const; + + //! Returns the value (average) per time span. + double value(const Core::Time &time) const; + + //! 
Returns the timestamp of the last values pushed + Core::Time last() const; + + void dumpBins() const; + + + private: + Core::Time _first; + Core::Time _last; + size_t _timeSpan; + double _scale; + mutable + double _shift; + mutable + std::vector _bins; + size_t _front; +}; + + +class SC_SYSTEM_CLIENT_API ObjectMonitor { + public: + typedef RunningAverage Log; + + ObjectMonitor(int timeSpanInSeconds); + ~ObjectMonitor(); + + + public: + Log *add(const std::string &name, const std::string &channel = ""); + void update(const Core::Time &time); + + + public: + struct Test { + std::string name; + std::string channel; + Core::Time updateTime; + size_t count; + Log *test; + }; + + typedef std::list Tests; + typedef Tests::const_iterator const_iterator; + + const_iterator begin() const; + const_iterator end() const; + + size_t size() const; + + + private: + Tests _tests; + int _timeSpan; +}; + + +} +} + + +#endif diff --git a/include/seiscomp/client/queue.h b/include/seiscomp/client/queue.h new file mode 100644 index 0000000..c9cf2e3 --- /dev/null +++ b/include/seiscomp/client/queue.h @@ -0,0 +1,157 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef SEISCOMP_CLIENT_QUEUE_H +#define SEISCOMP_CLIENT_QUEUE_H + + +#include +#include +#include + +#include + + +namespace Seiscomp { +namespace Client { + +class QueueClosedException : public Core::GeneralException { + public: + QueueClosedException() : Core::GeneralException("Queue has been closed") {} + QueueClosedException(const std::string& str ) : Core::GeneralException(str) {} +}; + +template +class ThreadedQueue { + // ---------------------------------------------------------------------- + // Public types + // ---------------------------------------------------------------------- + public: + typedef std::unique_lock lock; + + + // ---------------------------------------------------------------------- + // Non copyable + // ---------------------------------------------------------------------- + private: + ThreadedQueue(const ThreadedQueue&) = delete; + ThreadedQueue &operator=(const ThreadedQueue&) = delete; + + + // ---------------------------------------------------------------------- + // X'truction + // ---------------------------------------------------------------------- + public: + ThreadedQueue(); + ThreadedQueue(int n); + ~ThreadedQueue(); + + + // ---------------------------------------------------------------------- + // Interface + // ---------------------------------------------------------------------- + public: + /** + * @brief Resizes the queue to hold a maximum of n items before + * blocking. + * @param n The number of items to queue before blocking occurs. 
+ */ + void resize(int n); + + /** + * @brief Checks whether the queue can take new items without blocking. + * @return true if non-blocking push is possible, false otherwise. + */ + bool canPush() const; + + /** + * @brief Appends a new item to the end of the queue. If the queue is + * full then it will block until a consumer has popped an item. + * @param v The new item. + * @return true if successful, false if queue is closed. + */ + bool push(T v); + + /** + * @brief Checks with equality operator if the item is already queued + * and if not, pushes it to the end of the queue. + * @param v The new item. + * @return true if successful which also covers the case that the item + * is already queued. False if the queue is closed. + */ + bool pushUnique(T v); + + /** + * @brief Checks whether an item can be popped or not. + * Actually it returns whether the queue is empty or not. + * @return true if not empty, false if empty. + */ + bool canPop() const; + + /** + * @brief Pops an items from the queue. If the queue is empty then + * it blocks until a producer pushed an item. + * @return The popped item. + */ + T pop(); + + /** + * @brief Close the queue and cause all subsequent calls to push and + * pop to fail. + */ + void close(); + + /** + * @brief Returns whether the queue is closed or not. + * @return The closed flag. + */ + bool isClosed() const; + + /** + * @brief Query the number of queued items. + * @return The number of currently queued items. + */ + size_t size() const; + + /** + * @brief Resets the queue which incorporates resetting the buffer + * insertations and the closed state. + */ + void reset(); + + + // ---------------------------------------------------------------------- + // Private members + // ---------------------------------------------------------------------- + private: + volatile int _begin, _end; + volatile size_t _buffered; + volatile bool _closed; + std::vector _buffer; + std::condition_variable _notFull, _notEmpty; + mutable std::mutex _monitor; +}; + + +} +} + + +#endif diff --git a/include/seiscomp/client/queue.ipp b/include/seiscomp/client/queue.ipp new file mode 100644 index 0000000..207fa42 --- /dev/null +++ b/include/seiscomp/client/queue.ipp @@ -0,0 +1,266 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. 
* + ***************************************************************************/ + +#ifndef SEISCOMP_CLIENT_QUEUE_IPP +#define SEISCOMP_CLIENT_QUEUE_IPP + + +#include + +#include +#include + + +namespace Seiscomp { +namespace Client { + + +namespace { + +template +struct QueueHelper {}; + +template +struct QueueHelper { + static void clean(const std::vector &) {} + static T defaultValue() { return T(); } +}; + +template +struct QueueHelper { + static void clean(const std::vector &b) { + for ( size_t i = 0; i < b.size(); ++i ) { + if ( b[i] ) delete b[i]; + } + } + + static T defaultValue() { return nullptr; } +}; + +} + +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +ThreadedQueue::ThreadedQueue() : + _begin(0), _end(0), + _buffered(0), _closed(false), _buffer(0) +{} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +ThreadedQueue::ThreadedQueue(int n) : + _begin(0), _end(0), + _buffered(0), _closed(false), _buffer(n) +{} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +ThreadedQueue::~ThreadedQueue() { + close(); + QueueHelper::value>::clean(_buffer); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +void ThreadedQueue::resize(int n) { + lock lk(_monitor); + _buffer.resize(n); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +bool ThreadedQueue::canPush() const { + lock lk(_monitor); + + if ( _closed ) + throw QueueClosedException(); + + return _buffered < _buffer.size(); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +bool ThreadedQueue::push(T v) { + lock lk(_monitor); + while (_buffered == _buffer.size() && !_closed) + _notFull.wait(lk); + if ( _closed ) { + _notEmpty.notify_all(); + return false; + } + _buffer[_end] = v; + _end = (_end+1) % _buffer.size(); + ++_buffered; + _notEmpty.notify_all(); + return true; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +bool ThreadedQueue::pushUnique(T v) { + lock lk(_monitor); + // Find existing item + auto it = _begin; + while ( it != _end ) { + if ( _buffer[it] == v ) { + return true; + } + it = (it + 1) % _buffer.size(); + } + + while (_buffered == _buffer.size() && !_closed) + _notFull.wait(lk); + if ( _closed ) { + _notEmpty.notify_all(); + return false; + } + _buffer[_end] = v; + _end = (_end+1) % _buffer.size(); + ++_buffered; + _notEmpty.notify_all(); + return true; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +bool ThreadedQueue::canPop() const { + lock lk(_monitor); + + if ( _closed ) + throw QueueClosedException(); + + return _buffered > 0; +} +// 
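/* Usage sketch (illustrative, not taken from the original sources): a minimal
   producer/consumer built on the queue implemented above. It assumes the class
   template is ThreadedQueue<T> as declared in queue.h, that the template
   definitions from this .ipp file are visible to the including translation
   unit, and that the header is reachable as <seiscomp/client/queue.h>; the
   capacity of 16 and the variable names are placeholders. */

#include <seiscomp/client/queue.h>

#include <iostream>
#include <thread>

int main() {
	Seiscomp::Client::ThreadedQueue<int> queue(16);  // bounded buffer with 16 slots

	std::thread consumer([&queue]() {
		try {
			while ( true )
				std::cout << queue.pop() << std::endl;  // blocks while the queue is empty
		}
		catch ( Seiscomp::Client::QueueClosedException & ) {
			// close() wakes up a blocked pop() with this exception
		}
	});

	for ( int i = 0; i < 100; ++i )
		queue.push(i);  // blocks while the buffer is full

	// Note: once close() is called, pop() throws immediately even if items are
	// still buffered, so the consumer is not guaranteed to drain every value.
	queue.close();
	consumer.join();
	return 0;
}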
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +T ThreadedQueue::pop() { + lock lk(_monitor); + while (_buffered == 0 && !_closed) { + _notEmpty.wait(lk); + } + if ( _closed ) + throw QueueClosedException(); + T v = _buffer[_begin]; + _buffer[_begin] = QueueHelper::value>::defaultValue(); + _begin = (_begin+1) % _buffer.size(); + --_buffered; + _notFull.notify_all(); + return v; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +void ThreadedQueue::close() { + lock lk(_monitor); + if ( _closed ) return; + _closed = true; + _notFull.notify_all(); + _notEmpty.notify_all(); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +bool ThreadedQueue::isClosed() const { + lock lk(_monitor); + return _closed; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +size_t ThreadedQueue::size() const { + lock lk(_monitor); + return _buffered; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +template +void ThreadedQueue::reset() { + lock lk(_monitor); + _closed = false; + _begin = _end = 0; + _buffered = 0; + QueueHelper::value>::clean(_buffer); + std::fill(_buffer.begin(), _buffer.end(), QueueHelper::value>::defaultValue()); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +} +} + +#endif diff --git a/include/seiscomp/client/streamapplication.h b/include/seiscomp/client/streamapplication.h new file mode 100644 index 0000000..d8dc09e --- /dev/null +++ b/include/seiscomp/client/streamapplication.h @@ -0,0 +1,142 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. 
* + ***************************************************************************/ + + +#ifndef SEISCOMP_CLIENT_STREAM_APPLICATION_H +#define SEISCOMP_CLIENT_STREAM_APPLICATION_H + + +#include +#include +#include + +#include + + +namespace Seiscomp { +namespace Client { + + +class SC_SYSTEM_CLIENT_API StreamApplication : public Application { + // ---------------------------------------------------------------------- + // X'truction + // ---------------------------------------------------------------------- + public: + StreamApplication(int argc, char **argv); + ~StreamApplication(); + + + public: + bool openStream(); + void closeStream(); + + IO::RecordStream* recordStream() const; + + bool addStation(const std::string& networkCode, + const std::string& stationCode); + bool addStream(const std::string& networkCode, + const std::string& stationCode, + const std::string& locationCode, + const std::string& channelCode); + + void setStartTime(const Seiscomp::Core::Time&); + void setEndTime(const Seiscomp::Core::Time&); + bool setTimeWindow(const Seiscomp::Core::TimeWindow&); + + //! Sets whether to start the acquisition automatically + //! before the run loop or not. This method has to be called before run(). + //! The default is true. If set to false then the acquisition needs + //! to be started with readRecords or startRecordThread and + //! autoCloseOnAcquisitionFinished is also set to false. + void setAutoAcquisitionStart(bool); + + //! Sets the application close flag when acquisition is finished. + //! The default is true as auto start is true. If setAutoAcquisitionStart + //! is changed this flag is set as well. + void setAutoCloseOnAcquisitionFinished(bool); + + //! Sets the storage hint of incoming records. + //! The default is: DATA_ONLY + void setRecordInputHint(Record::Hint hint); + + //! Sets the data type of read records. + //! The default is: FLOAT + void setRecordDatatype(Array::DataType datatype); + + //! Returns the data type of the internal record sample buffer + Array::DataType recordDataType() const { return _recordDatatype; } + + void startRecordThread(); + void waitForRecordThread(); + bool isRecordThreadActive() const; + + + // ---------------------------------------------------------------------- + // Protected interface + // ---------------------------------------------------------------------- + protected: + bool init(); + bool run(); + void done(); + void exit(int returnCode); + + bool dispatch(Core::BaseObject* obj); + + void readRecords(bool sendEndNotification); + + //! This method gets called when the acquisition is finished + //! The default implementation closes the objects queue and + //! finishes the application + virtual void acquisitionFinished(); + + //! This method gets called when a new record has been received + //! by recordstream thread. + //! The default implementation stores it in the threaded object + //! queue which gets read by the main thread. + //! The input record is not managed and ownership is transferred + //! to this method. + virtual bool storeRecord(Record *rec); + + //! This method gets called when a record has been popped from + //! the event queue in the main thread. The ownership of the + //! pointer is transferred to this method. An empty function + //! body override would cause a memory leak. + virtual void handleRecord(Record *rec) = 0; + + //! 
Logs the received records for the last period + virtual void handleMonitorLog(const Core::Time ×tamp); + + + private: + bool _startAcquisition; + bool _closeOnAcquisitionFinished; + Record::Hint _recordInputHint; + Array::DataType _recordDatatype; + IO::RecordStreamPtr _recordStream; + std::thread *_recordThread; + size_t _receivedRecords; + ObjectLog *_logRecords; +}; + + +} +} + + +#endif diff --git a/include/seiscomp/config/api.h b/include/seiscomp/config/api.h new file mode 100644 index 0000000..683cae0 --- /dev/null +++ b/include/seiscomp/config/api.h @@ -0,0 +1,17 @@ +#ifndef SC_CONFIG_API_H +#define SC_CONFIG_API_H + +#if defined(WIN32) && (defined(SC_CONFIG_SHARED) || defined(SC_ALL_SHARED)) +# if defined(SC_CONFIG_EXPORTS) +# define SC_CONFIG_API __declspec(dllexport) +# define SC_CONFIG_TEMPLATE_EXPORT +# else +# define SC_CONFIG_API __declspec(dllimport) +# define SC_CONFIG_TEMPLATE_EXPORT extern +# endif +#else +# define SC_CONFIG_API +# define SC_CONFIG_TEMPLATE_EXPORT +#endif + +#endif diff --git a/include/seiscomp/config/config.h b/include/seiscomp/config/config.h new file mode 100644 index 0000000..0181f73 --- /dev/null +++ b/include/seiscomp/config/config.h @@ -0,0 +1,336 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef __SEISCOMP_CONFIG_H__ +#define __SEISCOMP_CONFIG_H__ + + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace Seiscomp { +namespace Config { + +/** + * Mapping of configuration variable to type + */ +typedef std::map Variables; + + +/** + * This is a class for reading and writing configuration files. Currently the + * following datatypes are supported: bool, int, double and std::string as well as + * lists of the datatypes + */ +class SC_CONFIG_API Config { + // ------------------------------------------------------------------------ + // X'struction + // ------------------------------------------------------------------------ + public: + Config(); + ~Config(); + + + // ------------------------------------------------------------------------ + // Public interface + // ------------------------------------------------------------------------ + public: + /** When names are queried and this check is enabled, it will + * throw an exception if the same name is defined in a later stage + * with respect to case insensitive name comparison. + * This allows to check for parameter inconsistencies that are + * hard to track otherwise. + */ + void setCaseSensitivityCheck(bool); + + /** Reads the given configuration file. 
+		 * @param file name of the configuration files
+		 * @param stage Optional stage value to be set to each read symbol
+		 * @param raw Raw mode which does not resolve references like ${var}
+		 * @return true on success
+		 */
+		bool readConfig(const std::string& file, int stage=-1, bool raw=false);
+
+		/** Writes the configuration to the given configuration file.
+		 * @param file name of the configuration files
+		 * @param localOnly write only values read from this file and
+		 *                  new entries
+		 * @return true on success
+		 */
+		bool writeConfig(const std::string& file, bool localOnly = true,
+		                 bool multilineLists = false);
+
+		/** Writes the configuration to the file which was given to
+		 * readConfig
+		 * @return true on success
+		 */
+		bool writeConfig(bool localOnly = true);
+
+		/** Sets the current logger. The ownership does not go to the config
+		 * object. It is up to the caller to free resources.
+		 * @param logger A logger implementation
+		 */
+		void setLogger(Logger *logger);
+
+		/** Returns the symbol table as string */
+		std::string symbolsToString();
+
+		/** Returns the names of parameters */
+		std::vector<std::string> names() const;
+
+		/** Returns the names of the visited files */
+		std::string visitedFilesToString();
+
+		//! Gets an integer from the configuration file
+		//! @param name name of the element
+		//! @return value
+		int getInt(const std::string& name) const;
+		int getInt(const std::string& name, bool* error) const;
+		bool getInt(int& value, const std::string& name) const;
+
+		bool setInt(const std::string& name, int value);
+
+		/** Gets a double from the configuration file
+		 * @param name name of the element
+		 * @return double
+		 */
+		double getDouble(const std::string& name) const;
+		double getDouble(const std::string& name, bool* error) const;
+		bool getDouble(double& value, const std::string& name) const;
+
+		bool setDouble(const std::string& name, double value);
+
+		/** Gets a boolean from the configuration file
+		 * @param name name of the element
+		 * @return boolean
+		 */
+		bool getBool(const std::string& name) const;
+		bool getBool(const std::string& name, bool* error) const;
+		bool getBool(bool& value, const std::string& name) const;
+
+		bool setBool(const std::string& name, bool value);
+
+		/** Gets a string from the configuration file
+		 * @param name name of the element
+		 * @return string
+		 */
+		std::string getString(const std::string& name) const;
+		std::string getString(const std::string& name, bool* error) const;
+		bool getString(std::string& value, const std::string& name) const;
+
+		bool setString(const std::string& name, const std::string& value);
+
+		/** Removes the symbol with the given name from the symbol table.
+ * @param name Symbol to be removed + */ + bool remove(const std::string& name); + + std::vector getInts(const std::string& name) const; + + std::vector getInts(const std::string& name, bool* error) const; + + bool setInts(const std::string& name, const std::vector& values); + + std::vector getDoubles(const std::string& name) const; + + std::vector getDoubles(const std::string& name, bool* error) const; + + bool setDoubles(const std::string& name, const std::vector& values); + + std::vector getBools(const std::string& name) const; + + std::vector getBools(const std::string& name, bool* error) const; + + bool setBools(const std::string& name, const std::vector& values); + + std::vector getStrings(const std::string& name) const; + std::vector getStrings(const std::string& name, bool* error) const; + bool getStrings(std::vector& value, const std::string& name) const; + + bool setStrings(const std::string& name, const std::vector& values); + + SymbolTable *symbolTable() const; + + /** Evaluates a rvalue string and writes the output in result. + * The symbol table is taken from this instance. + * @param rvalue The value string to be parsed + * @param result The result string vector + * @param resolveReference Should references be resolved or not (eg + * environment variables). + * @return Success or error + */ + bool eval(const std::string &rvalue, + std::vector &result, + bool resolveReferences = true, + std::string *errmsg = NULL); + + /** Evaluates a rvalue string and writes the output in result. + * The symbol table is taken from this instance. + * @param rvalue The value string to be parsed + * @param result The result string vector + * @param resolveReference Should references be resolved or not (eg + * environment variables). + * @param The symbol table to be used to resolve references if enabled. + * @return Success or error + */ + static bool Eval(const std::string &rvalue, + std::vector &result, + bool resolveReferences = true, + SymbolTable *symtab = NULL, + std::string *errmsg = NULL); + + /** Writes the values of a symbol to an output stream. No new line + * is appended. + */ + static void writeValues(std::ostream &os, const Symbol *symbol, + bool multilineLists = false); + + /** Writes the content of the symbol to an output stream. No new line + * is appended. + */ + static void writeContent(std::ostream &os, const Symbol *symbol, + bool multilineLists = false); + + /** Writes a symbol to an output stream including the symbol + * name and a equal sign. A new line is appended. + */ + static void writeSymbol(std::ostream &os, const Symbol *symbol, + bool multilineLists = false); + + /** Enables/disables tracking of configuration variables. + */ + void trackVariables(bool enabled); + + /** Returns all configuration variables read by an application mapped + * to a type + */ + const Variables& getVariables() const; + + /** + * @brief Escapes a string value that it can be stored in the + * configuration file without further modifications. 
+ * @return The escaped string inside double quotes if necessary + */ + std::string escape(const std::string &) const; + + + // ---------------------------------------------------------------------- + // Protected interface + // ---------------------------------------------------------------------- + protected: + /** Parses the given file + * @return true on success false on failure + */ + bool parseFile(std::istream &is); // virtual candidate + + + // ------------------------------------------------------------------------ + // Private interface + // ------------------------------------------------------------------------ + private: + void init(); + bool handleEntry(const std::string& entry, const std::string& comment); + bool handleInclude(const std::string& fileName); + void handleAssignment(const std::string& name, const std::string& content, + std::vector& values, + const std::string& comment); + std::vector tokenize(const std::string& entry); + static bool reference(const std::string &name, + std::vector &value, + const SymbolTable *symtab); + static bool parseRValue(const std::string& entry, + std::vector& parsedValues, + const SymbolTable *symtab, + bool resolveReferences, + bool rawMode, + std::string *errmsg); + + bool readInternalConfig(const std::string &file, SymbolTable *symbolTable, + const std::string &namespacePrefix, + int stage = -1, bool raw = false); + + template + T get(const std::string& name) const; + + template + T get(const std::string& name, bool* error) const; + + template + bool get(T& value, const std::string& name) const; + + template + std::vector getVec(const std::string& name) const; + + template + std::vector getVec(const std::string& name, bool* error) const; + + template + void add(const std::string& name, const T& value); + + template + void add(const std::string& name, const std::vector& values); + + /** Sets an value in the configuration file + * @param element name of the element + * @param value value for the element */ + template + bool set(const std::string& name, const T& value); + + template + bool set(const std::string& name, const std::vector& values); + + inline void addVariable(const std::string &name, const char *type) const; + + void releaseSymbolTable(); + + + // ------------------------------------------------------------------------ + // Private data members + // ------------------------------------------------------------------------ + private: + typedef std::deque Namespaces; + int _stage; + int _line; + bool _resolveReferences; + std::string _fileName; + Namespaces _namespaces; + std::string _namespacePrefix; + std::string _defaultNamespacePrefix; + Logger *_logger; + + SymbolTable *_symbolTable; + bool _trackVariables; + Variables _variables; +}; + + +} // namespace Config +} // namespace Seiscomp + +#endif diff --git a/include/seiscomp/config/config.ipp b/include/seiscomp/config/config.ipp new file mode 100644 index 0000000..f191264 --- /dev/null +++ b/include/seiscomp/config/config.ipp @@ -0,0 +1,224 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. 
Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +template +void Config::add(const std::string& name, const T& value) +{ + Symbol symbol; + symbol.name = name; + symbol.values.push_back(Private::toString(value)); + symbol.uri = ""; + + _symbolTable->add(symbol); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +template <> +void Config::add(const std::string& name, const std::string& value) +{ + Symbol symbol; + symbol.name = name; + symbol.values.push_back(value); + symbol.uri = ""; + + _symbolTable->add(symbol); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +template +void Config::add(const std::string& name, const std::vector& values) +{ + Symbol symbol; + symbol.name = name; + for (size_t i = 0; i < values.size(); ++i) + symbol.values.push_back(Private::toString(values[i])); + symbol.uri = ""; + + _symbolTable->add(symbol); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +template <> +void Config::add(const std::string& name, const std::vector& values) +{ + Symbol symbol; + symbol.name = name; + for (size_t i = 0; i < values.size(); ++i) + symbol.values.push_back(values[i]); + symbol.uri = ""; + + _symbolTable->add(symbol); +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +template +bool Config::set(const std::string& name, const T& value) +{ + Symbol* symbol = _symbolTable->get(name); + if (!symbol) + { + add(name, value); + return true; + } + + symbol->values.clear(); + symbol->values.push_back(Private::toString(value)); + symbol->uri = ""; + + return true; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + + +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +template +bool Config::set(const std::string& name, const std::vector& values) +{ + Symbol* symbol = _symbolTable->get(name); + if (!symbol) + { + add(name, values); + return true; + } + + symbol->values.clear(); + for (size_t i = 0; i < values.size(); ++i) + symbol->values.push_back(Private::toString(values[i])); + + symbol->uri = ""; + + return true; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +template +T Config::get(const std::string& name) const { + const Symbol* symbol = _symbolTable->get(name); + if (!symbol) + throw OptionNotFoundException(name); + + T value = T(); + if (!Private::fromString(value, symbol->values[0])) + throw TypeConversionException(symbol->values[0]); + + return value; +} +// 
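/* Usage sketch (illustrative, not taken from the original sources): reading a
   configuration file and querying typed values through the accessors declared
   in config.h, backed by the templated get<T>() above. The include path
   <seiscomp/config/config.h>, the file name "scapp.cfg" and the parameter
   names are placeholders; the exception types are those from exceptions.h
   further below. */

#include <seiscomp/config/config.h>

#include <iostream>

int main() {
	Seiscomp::Config::Config cfg;

	if ( !cfg.readConfig("scapp.cfg") )  // stage and raw mode keep their defaults
		return 1;

	try {
		// Throwing variant: OptionNotFoundException or TypeConversionException
		int interval = cfg.getInt("monitor.interval");
		std::cout << "interval = " << interval << std::endl;
	}
	catch ( Seiscomp::Config::Exception &e ) {
		std::cerr << e.what() << std::endl;
	}

	// Error-flag variant: never throws, reports failure through the bool
	bool err = false;
	double scale = cfg.getDouble("monitor.scale", &err);
	if ( err )
		scale = 1.0;  // fall back to a default value
	std::cout << "scale = " << scale << std::endl;

	return 0;
}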
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +template +std::vector Config::getVec(const std::string& name) const { + const Symbol* symbol = _symbolTable->get(name); + if (!symbol) + throw OptionNotFoundException(name); + + std::vector values; + for (size_t i = 0; i < symbol->values.size(); ++i) + { + T tmp = T(); + if (!Private::fromString(tmp, symbol->values[i])) + throw TypeConversionException(symbol->values[i]); + values.push_back(tmp); + } + + return values; +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + + +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +template +T Config::get(const std::string& name, bool* error) const +{ + *error = false; + try { + return get(name); + } + catch (...) { + *error = true; + return T(); + } +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + + +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +template +bool Config::get(T& value, const std::string& name) const +{ + try { + value = get(name); + return true; + } catch (...) { + return false; + } +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< + + + + + +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +template +std::vector Config::getVec(const std::string& name, bool* error) const +{ + *error = false; + try { + return getVec(name); + } + catch (...) { + *error = true; + return std::vector(); + } +} +// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< diff --git a/include/seiscomp/config/exceptions.h b/include/seiscomp/config/exceptions.h new file mode 100644 index 0000000..05c065e --- /dev/null +++ b/include/seiscomp/config/exceptions.h @@ -0,0 +1,80 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. 
* + ***************************************************************************/ + + +#ifndef __SEISCOMP_CONFIG_EXCEPTIONS_H__ +#define __SEISCOMP_CONFIG_EXCEPTIONS_H__ + + +#include +#include +#include + + +namespace Seiscomp { +namespace Config { + + +class SC_CONFIG_API Exception : public std::exception { + public: + Exception() : _what("Configuration exception") {} + Exception(const std::string &str) : _what(str) {} + Exception(const char *str) : _what(str) {} + virtual ~Exception() throw() {} + + const char *what() const throw() { return _what.c_str(); } + + private: + std::string _what; +}; + + +class SC_CONFIG_API OptionNotFoundException : public Exception { + public: + OptionNotFoundException() : Exception("Option not found") { } + OptionNotFoundException(const std::string& str) : Exception("Option not found for: " + str) { } +}; + + +class SC_CONFIG_API TypeConversionException : public Exception { + public: + TypeConversionException() : Exception("Type conversion error") { } + TypeConversionException(const std::string& str) : Exception("Type conversion error: " + str) { } +}; + + +class SC_CONFIG_API SyntaxException : public Exception { + public: + SyntaxException() : Exception("Syntax error") { } + SyntaxException(const std::string& str) : Exception("Syntax error: " + str) { } +}; + + +class SC_CONFIG_API CaseSensitivityException : public Exception { + public: + CaseSensitivityException() : Exception("Case-insensitiv names are ambiguous") { } + CaseSensitivityException(const std::string &str) : Exception("Case-insensitiv names are ambiguous: " + str) { } +}; + + +} // namespace Config +} // namespace Seiscomp + + +#endif diff --git a/include/seiscomp/config/log.h b/include/seiscomp/config/log.h new file mode 100644 index 0000000..a84ecd2 --- /dev/null +++ b/include/seiscomp/config/log.h @@ -0,0 +1,67 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef __SEISCOMP_CONFIG_LOG_H__ +#define __SEISCOMP_CONFIG_LOG_H__ + + +#include +#include + + +namespace Seiscomp { +namespace Config { + + +enum LogLevel { + ERROR, + WARNING, + INFO, + DEBUG +}; + + +struct SC_CONFIG_API Logger { + virtual ~Logger(); + virtual void log(LogLevel, const char *filename, int line, const char *msg); +}; + + +extern char log_msg_buffer[1024]; + + +#define CONFIG_LOG_CHANNEL(chan, msg, ...) \ + if ( _logger ) {\ + snprintf(log_msg_buffer, 1023, msg, __VA_ARGS__);\ + _logger->log(chan, _fileName.c_str(), _line, log_msg_buffer);\ + } + + +#define CONFIG_ERROR(msg, ...) CONFIG_LOG_CHANNEL(ERROR, msg, __VA_ARGS__) +#define CONFIG_WARNING(msg, ...) CONFIG_LOG_CHANNEL(WARNING, msg, __VA_ARGS__) +#define CONFIG_INFO(msg, ...) 
CONFIG_LOG_CHANNEL(INFO, msg, __VA_ARGS__) +#define CONFIG_DEBUG(msg, ...) CONFIG_LOG_CHANNEL(DEBUG, msg, __VA_ARGS__) + + +} +} + + +#endif diff --git a/include/seiscomp/config/symboltable.h b/include/seiscomp/config/symboltable.h new file mode 100644 index 0000000..9cded4f --- /dev/null +++ b/include/seiscomp/config/symboltable.h @@ -0,0 +1,135 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + +#ifndef __SEISCOMP_CONFIG_SYMBOLTABLE__ +#define __SEISCOMP_CONFIG_SYMBOLTABLE__ + +#include +#include +#include +#include +#include + +#include + +namespace Seiscomp { +namespace Config { + + +struct SC_CONFIG_API Symbol { + typedef std::vector Values; + + Symbol(const std::string& name, const std::string& ns, + const std::vector& values, + const std::string& uri, + const std::string& comment, + int stage = -1); + Symbol(); + + void set(const std::string& name, const std::string& ns, + const std::vector& values, + const std::string& uri, + const std::string& comment, + int stage = -1); + + bool operator ==(const Symbol& symbol) const; + + std::string toString() const; + + std::string name; + std::string ns; + std::string content; + Values values; + std::string uri; + std::string comment; + int stage; + int line; +}; + + + +class SC_CONFIG_API SymbolTable { + + private: + typedef std::map Symbols; + typedef std::vector SymbolOrder; + typedef std::map CISymbols; + + public: + typedef SymbolOrder::const_iterator iterator; + typedef std::set IncludedFiles; + typedef IncludedFiles::iterator file_iterator; + + public: + SymbolTable(); + + + public: + void setCaseSensitivityCheck(bool); + void setLogger(Logger *); + Logger *logger(); + + void add(const std::string& name, const std::string &ns, + const std::string& content, + const std::vector& values, + const std::string& uri, + const std::string& comment = "", + int stage=-1, int line=-1); + + void add(const Symbol& symbol); + + Symbol* get(const std::string& name); + const Symbol* get(const std::string& name) const; + + bool remove(const std::string& name); + + int incrementObjectCount(); + int decrementObjectCount(); + int objectCount() const; + + std::string toString() const; + + bool hasFileBeenIncluded(const std::string& fileName); + void addToIncludedFiles(const std::string& fileName); + + file_iterator includesBegin(); + file_iterator includesEnd(); + + iterator begin(); + iterator end(); + + private: + //! 
Returns true if an inconsistent definition has been found + bool checkCI(const std::string &name, const Symbol *) const; + + private: + bool _csCheck; + Symbols _symbols; + CISymbols _cisymbols; + SymbolOrder _symbolOrder; + IncludedFiles _includedFiles; + int _objectCount; + Logger *_logger; +}; + + +} // namespace Config +} // namespace Seiscomp + +#endif diff --git a/include/seiscomp/core.h b/include/seiscomp/core.h new file mode 100644 index 0000000..a1f6c50 --- /dev/null +++ b/include/seiscomp/core.h @@ -0,0 +1,17 @@ +#ifndef SC_SYSTEM_CORE_API_H +#define SC_SYSTEM_CORE_API_H + +#if defined(WIN32) && (defined(SC_SYSTEM_CORE_SHARED) || defined(SC_ALL_SHARED)) +# if defined(SC_SYSTEM_CORE_EXPORTS) +# define SC_SYSTEM_CORE_API __declspec(dllexport) +# define SC_SYSTEM_CORE_TEMPLATE_EXPORT +# else +# define SC_SYSTEM_CORE_API __declspec(dllimport) +# define SC_SYSTEM_CORE_TEMPLATE_EXPORT extern +# endif +#else +# define SC_SYSTEM_CORE_API +# define SC_SYSTEM_CORE_TEMPLATE_EXPORT +#endif + +#endif diff --git a/include/seiscomp/core/archive.h b/include/seiscomp/core/archive.h new file mode 100644 index 0000000..b5d2c8d --- /dev/null +++ b/include/seiscomp/core/archive.h @@ -0,0 +1,589 @@ +/*************************************************************************** + * Copyright (C) gempa GmbH * + * All rights reserved. * + * Contact: gempa GmbH (seiscomp-dev@gempa.de) * + * * + * GNU Affero General Public License Usage * + * This file may be used under the terms of the GNU Affero * + * Public License version 3.0 as published by the Free Software Foundation * + * and appearing in the file LICENSE included in the packaging of this * + * file. Please review the following information to ensure the GNU Affero * + * Public License version 3.0 requirements will be met: * + * https://www.gnu.org/licenses/agpl-3.0.html. * + * * + * Other Usage * + * Alternatively, this file may be used in accordance with the terms and * + * conditions contained in a signed written agreement between you and * + * gempa GmbH. * + ***************************************************************************/ + + +#ifndef SEISCOMP_CORE_ARCHIVE_H +#define SEISCOMP_CORE_ARCHIVE_H + + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include + + +#define DECLARE_ROOT_SERIALIZATION(RootClass) \ + public: \ + typedef Seiscomp::Core::Generic::Archive Archive; \ + virtual void serialize(Archive&) {} + +#define DECLARE_SERIALIZATION \ + public: \ + virtual void serialize(Archive& ar) override + + +namespace Seiscomp { +namespace Core { +namespace Generic { + + +// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +/** \brief A template archive interface + + An archive offers an interface to read from and write to datasources. + */ +template +class Archive { + // ------------------------------------------------------------------ + // Traits + // ------------------------------------------------------------------ + public: + typedef ROOT_TYPE RootType; + typedef boost::variant PropertyValue; + typedef std::map Properties; + + + // ------------------------------------------------------------------ + // Public Types + // ------------------------------------------------------------------ + public: + //! Serialization hints + enum { + NONE = 0, + STATIC_TYPE = 0x01, + //! Objects should not serialize their child elements + IGNORE_CHILDS = 0x02, + //! 
Objects are stored as XML nodes not as XML attributes + XML_ELEMENT = 0x04, + //! Objects are stored as XML cdata not as XML attributes + XML_CDATA = 0x08, + //! This attribute is mandatory even if empty + XML_MANDATORY = 0x10, + //! Objects are stored in a seperate database table and + //! not in columns of the parent object table + DB_TABLE = 0x20, + //! The time is stored in two records: time and microseconds + SPLIT_TIME = 0x40, + //! This is just an informational flag used for database + //! access mostly. This flag is the only one that will + //! be kept alive when serializing child objects. + INDEX_ATTRIBUTE = 0x80 + }; + + + // ------------------------------------------------------------------ + // Xstruction + // ------------------------------------------------------------------ + protected: + //! Constructor + Archive(); + + + public: + //! Destructor + virtual ~Archive() {} + + + // ------------------------------------------------------------------ + // Public Interface + // ------------------------------------------------------------------ + public: + static int PackVersion(int major, int minor) { return major << 16 | (minor & 0xFFFF); } + + /** Opens an archive. + Every archive class interpretes the dataSource parameter + in its own way. It can either point to a named dataSource (file) + or to a block of memory containing the actual archive data. + */ + virtual bool open(const char* dataSource); + + //! Creates a new archive + virtual bool create(const char* dataSource); + + virtual void close() = 0; + + /** + * @brief Sets strict reading mode. In strict mode optional attributes + * must be parsed correctly otherwise the archive is not valid. + * If strict mode is disabled then invalid optional attributes + * or objects are set to None or nullptr. + * @param strict Enabled or disabled + */ + void setStrictMode(bool strict); + bool isStrictMode() const; + + //! Queries whether the archive is in reading mode or not + bool isReading() const; + + //! Returns whether the last operation was successfull or not + bool success() const; + + //! Returns the serialization hints to propose a special + //! behaviour to serializable objects. + int hint() const; + + //! Sets the current serialization hint + void setHint(int); + + //! Sets the validity during serialization if needed + void setValidity(bool); + + void setVersion(Version v) { _version = v; } + Version version() const { return _version; } + + int versionMajor() const { return _version.majorTag(); } + int versionMinor() const { return _version.minorTag(); } + + template + bool isLowerVersion() const { + return _version.packed ::Value; + } + + template + bool isVersion() const { + return _version.packed == VersionPacker::Value; + } + + template + bool isHigherVersion() const { + return _version.packed > VersionPacker::Value; + } + + template + bool supportsVersion() const { + return _version.packed >= VersionPacker::Value; + } + + + // ------------------------------------------------------------------ + // Property interface + // ------------------------------------------------------------------ + public: + //! Returns the number of user set properties + size_t propertyCount() const; + + //! Sets a value for the named property. If the property does not + //! yet exist, it will be added and false will be returned. If + //! the property exists already, true is returned. The value is + //! updated in both cases. + bool setProperty(const char *name, const PropertyValue &v); + + //! 
Returns a property (if set) or nullptr pointer given a property + //! name. + const PropertyValue *property(const char *name) const; + + const int *propertyInt(const char *name) const; + const double *propertyDouble(const char *name) const; + const std::string *propertyString(const char *name) const; + + //! Removes all set properties + void clearProperties(); + + + // ------------------------------------------------------------------ + // Read methods + // ------------------------------------------------------------------ + public: + //! Reads an integer + virtual void read(std::int8_t &value) = 0; + virtual void read(std::int16_t &value) = 0; + virtual void read(std::int32_t &value) = 0; + virtual void read(std::int64_t &value) = 0; + + //! Reads a float + virtual void read(float &value) = 0; + //! Reads a double + virtual void read(double &value) = 0; + //! Reads a float complex + virtual void read(std::complex &value) = 0; + //! Reads a double complex + virtual void read(std::complex &value) = 0; + //! Reads a boolean + virtual void read(bool &value) = 0; + + //! Reads a vector of chars + virtual void read(std::vector &value) = 0; + + //! Reads a vector of ints + virtual void read(std::vector &value) = 0; + virtual void read(std::vector &value) = 0; + virtual void read(std::vector &value) = 0; + virtual void read(std::vector &value) = 0; + + //! Reads a vector of floats + virtual void read(std::vector &value) = 0; + + //! Reads a vector of doubles + virtual void read(std::vector &value) = 0; + + //! Reads a vector of complex doubles + virtual void read(std::vector > &value) = 0; + + //! Reads a vector of strings + virtual void read(std::vector &value) = 0; + + //! Reads a vector of time + virtual void read(std::vector