[seiscomp, scanloc] Install, add .gitignore

This commit is contained in:
2025-10-09 15:07:02 +02:00
commit 20f5301bb1
2848 changed files with 1315858 additions and 0 deletions

17
etc/defaults/fdsnws.cfg Normal file
View File

@ -0,0 +1,17 @@
# Defines a list of modules loaded at startup.
plugins = ${plugins}, fdsnxml
# SeisComP applications access waveform data through the RecordStream
# interface. Please consult the SeisComP documentation for a list of supported
# services and their configuration.
# This parameter configures the RecordStream URL, format:
# [service://]location[#type]. "service" is the name of the recordstream
# implementation. If "service" is not given "file://" is implied.
recordstream = sdsarchive://@ROOTDIR@/var/lib/archive
# Set the number of bytes to buffer for each chunk of waveform data served
# to the client. The lower the buffer the higher the overhead of Python Twisted.
# The higher the buffer the higher the memory usage per request. 100kB seems
# to be a good trade-off.
recordBulkSize = 102400

View File

@ -0,0 +1,5 @@
# UDP port for receiving GDRT messages. By default port 9999 will be used.
plugins.gdrt.udpport = 9999
# Location of station list file.
plugins.gdrt.stationsFrom = stations.txt

93
etc/defaults/global.cfg Normal file
View File

@ -0,0 +1,93 @@
# Default plugins to load. Application specific configuration
# files should use the 'plugins' entry to specify additional
# plugins otherwise when using 'core.plugins' also these
# default values are going to be overwritten.
#
# To be able to read from all supported databases all available
# database plugins are loaded as 'core'.
# All currently supported db backends: dbmysql, dbpostgresql, dbsqlite3
core.plugins = dbmysql
# Use log level 2 (error and warning)
logging {
level = 2
# Use logfiles. It is commented by default to allow applications to define
# console output with their hard coded defaults. If this setting is enabled
# it would otherwise always override the applications default logging
# backend.
#file = true
# Rotate the logfiles
file {
rotator = true
# Rotate each 86400 seconds (1 day)
rotator.timeSpan = 86400
# Keep 7 rotated log files
rotator.archiveSize = 7
}
}
# Server connection
connection.server = localhost/production
# The connection timeout
connection.timeout = 3
# How to transfer messages (binary, xml)?
connection.encoding = binary
# Use slink (seedlink) as record source service.
recordstream = slink://localhost:18000
# The agencyID to use when tagging processing results
agencyID = GFZ
# Organization name used mainly by ArcLink and SeedLink.
organization = Unset
# Configures the default filters selectable in manual picker.
# The entry with a leading "@" is selected as default filter.
picker.filters = \
"BP 0.1 - 1 Hz 3rd order;RMHP(10)>>ITAPER(30)>>BW(3,0.1,1)", \
"BP 0.1 - 2 Hz 3rd order;RMHP(10)>>ITAPER(30)>>BW(3,0.1,2)", \
"BP 0.4 - 1 Hz 3rd order;RMHP(10)>>ITAPER(30)>>BW(3,0.4,1)", \
"@BP 0.7 - 2 Hz 3rd order;RMHP(10)>>ITAPER(30)>>BW(3,0.7,2)", \
"BP 1 - 3 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,1.0,3)", \
"BP 1 - 5 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,1.0,5)", \
"BP 2 - 4 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,2.0,4)", \
"BP 3 - 6 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,3.0,6)", \
"BP 4 - 8 Hz 3rd order;RMHP(5)>>ITAPER(10)>>BW(3,4.0,8)", \
"HP 3 Hz 3rd order;RMHP(1)>>ITAPER(2)>>BW_HP(3,3)", \
"BP 0.7 - 2 Hz + STA/LTA(1,50);RMHP(10)->ITAPER(30)->BW(3,0.7,2)->STALTA(1,50)"
# Configure the columns of the event list that are visible initially.
# The first column containing the origin time is always visible and cannot
# be hidden.
# Possible values are:
# * Type
# * M
# * MType
# * Phases
# * Lat
# * Lon
# * Depth
# * Stat
# * Agency
# * Region
# * ID
eventlist.visibleColumns = M, MType, Phases, RMS, Lat, Lon, Depth, Stat, Agency, Region, ID
# Default travel time table configuration. Plugins can be added to for custom
# travel time table implementations.
# This configuration can be used by applications that need to know which
# interfaces are activated and which tables they define.
ttt {
libtau.tables = iasp91, ak135
LOCSAT.tables = iasp91, tab
homogeneous.tables = ""
}

22
etc/defaults/ql2sc.cfg Normal file
View File

@ -0,0 +1,22 @@
# Send journals and event specific updates to the EVENT group.
connection.primaryGroup = EVENT
# Receive objects from EVENT group. This is necessary to wait for event
# association of imported origins.
connection.subscriptions = EVENT
# Number of seconds to fetch missed updates on start up.
backLog = 1800
# Number of public objects to cache.
cacheSize = 5000
# Maximum number of notifiers to batch in one message. If set to 0 no size
# limit is enforced. Make sure to not hit the overall message size limit of
# 16MiB which is enforced by the messaging system.
batchSize = 2000
# If event synchronisation is enabled and an incoming origin is not yet
# associated with an event on the target machine then this timeout defines
# the maximum number of seconds to wait for an association.
eventAssociationTimeout = 10

3
etc/defaults/scalert.cfg Normal file
View File

@ -0,0 +1,3 @@
# Defines a list of message groups to subscribe to. The default is usually
# given by the application and does not need to be changed.
connection.subscriptions = EVENT, LOCATION, MAGNITUDE

18
etc/defaults/scamp.cfg Normal file
View File

@ -0,0 +1,18 @@
# Send to the AMPLITUDE group
connection.primaryGroup = AMPLITUDE
# Receive objects from PICK, AMPLITUDE and LOCATION group
connection.subscriptions = PICK, AMPLITUDE, LOCATION
# The amplitudes to compute triggered by an incoming Origin
amplitudes = MLv, mb, mB, Mwp
# The minimum arrival weight within an origin to compute amplitudes
# for the associated pick.
amptool.minimumPickWeight = 0.5
# Timeout in seconds of the first data packet of waveform data acquisition.
amptool.initialAcquisitionTimeout = 30
# Timeout in seconds of any subsequent data packet of waveform data acquisition.
amptool.runningAcquisitionTimeout = 2

283
etc/defaults/scanloc.cfg Normal file
View File

@ -0,0 +1,283 @@
plugins=${plugins}, scoresum
# Sets the logging level between 1 and 4 where 1=ERROR, 2=WARNING, 3=INFO and
# 4=DEBUG.
logging.level = 2
# Defines the primary group of a module. This is the name of the group where a
# module sends its messages to if the target group is not explicitly given in
# the send call.
connection.primaryGroup = LOCATION
# Defines a list of message groups to subscribe to. The default is usually
# given by the application and does not need to be changed.
connection.subscriptions = PICK, LOCATION
# Default source depth used for locating origins. The range of supported values
# depends on the configured travel-time interface and locator.
# When origin depth after locating is 0.0 or undefined, the depth is corrected
# to "defaultDepth" without relocating since the situation indicates issues.
# Note: A similar parameter may require configuration for score plugins.
defaultDepth = 0.0
# Maximum depth of an origin to be sent.
# Note: A similar parameter may require configuration for score plugins.
ignoreDepth = 650.0
# Compute distances in 3D for cluster search, phase association and internal
# event association. Otherwise, distances are measured from geographic
# coordinates ignoring elevation.
use3D = false
# Maximum allowed overall residual of an origin to be sent.
maxRMS = 1.5
# Minimum score of an origin to be sent.
minScore = 0.0
# Defines the score processor to be used for ranking origins of events.
# Additional configuration of the plugin and the plugin parameters is required.
score = OriginSum
# Time to keep picks. Time comparison is based on the pick times. In non-real
# time playbacks the time to keep picks is usually compressed.
buffer.pickKeep = 180.0
# Time to keep origins. Time comparison is based on the origin times. In
# non-real time playbacks the time to keep origins is usually compressed.
buffer.originKeep = 180.0
# Ignore and do not buffer external origins received from messaging or from XML
# files.
# Default: Accept external origins from messaging or XML and associate more
# phases.
buffer.ignoreOrigins = false
# Ignore and do not buffer manual external origins received from messaging or
# from XML files.
# The parameter "buffer.ignoreOrigins" must be inactive for this option to be
# effective.
buffer.ignoreManualOrigins = true
# The time delta to the future. Positive values allow objects with times in the
# future to be buffered when the difference to current time or to creation time
# is not larger than the given value.
# Normally, objects like picks and origins are created after their actual times
# and all other objects are spurious and should be ignored. In case of wrong
# sensor timing or faster-than-real-time waveform playbacks, objects may be
# generated with actual times (pick time or origin time) in the future.
# If less or equal to 0.0, then all objects created before time of the object
# or with object times in the future are ignored.
buffer.futureTimeDelta = 0.0
# Phase type of considered picks. Valid types: P or Is.
clusterSearch.phaseType = P
# Minimum number of core points (picks) to form a cluster.
clusterSearch.minSize = 4
# Maximum number of picks in a cluster when stopping clustering. Eventual
# clusters may still be larger from core points. Using values < 0 deactivates
# the feature.
# The parameter is meant to support large-N networks where the high network
# density results in very many clusters with high computational load
# effectively slowing down the processing of picks. Using reasonable values
# such as 10 or 15 will reduce the number of clusters while maintaining high
# quality initial origins.
clusterSearch.maxSize = -1
# Maximum allowed difference between considered picks and cluster reference
# time (latest pick time). Optimum values can be derived from travel times. The
# cluster search ignores all picks which exceed the value. Using "-1" enables
# all picks regardless of the delay.
clusterSearch.maxPickDelay = -1.0
# Mode to set the cluster reference time. Read the documentation for the
# details. Available modes:
# LastPick: The pick time of the last incoming P pick is the cluster reference
# time.
# MaxPickDelay: Setting the cluster reference time is delayed until
# clusterSearch.minSize and association.minPhase picks are available.
clusterSearch.referenceTimeMode = LastPick
# The level on which to compare stream IDs of picks in order to avoid origins
# with multiple picks from the same sensor. The level is derived from the
# NET.STA.LOC.CHA stream representation. Picks with equal stream IDs on the
# given level are treated as originating from the same stream no matter of the
# ID on a higher level. Clusters having picks on streams equal on the given
# level are split into new clusters with unique streams. During association
# picks equal on the given level are ignored - the first assigned pick wins.
streamCheckLevel = loc
# Average velocity used for distance calculation.
clusterSearch.averageVelocity = 7.0
# Maximum allowed distance over all core points and maximum allowed distance
# for neighborhood search. Both account for travel-time difference,
# inter-station distance and the configured averageVelocity.
clusterSearch.maxSearchDist = 60
# Maximum allowed origins derived from cluster search. Set this option to zero
# to disable the cluster search completely.
clusterSearch.maxOrigins = 128
# Send origins based on cluster search as preliminary origins if the location
# fails. Otherwise those origins are ignored. Consider activating
# "checkOrigins".
clusterSearch.preliminary = false
# Check, if derived origins fulfill the cluster search criteria anymore.
# Activating this option may lower the number of events and suppress fake
# events.
clusterSearch.checkOrigins = false
# Number of picks in cluster origin to be reached for starting to associate more P
# and S picks.
association.minPhase = 4
# Maximum distance from origin to stations for associating P and S picks.
association.maxDist = 1000.0
# Maximum allowed difference between measured and predicted arrival times for
# associating P phases to origin. Larger values allow significant correction of
# initial locations.
association.maxPResidual = 4.0
# Allow S-type picks without checking for a reference pick. References to P
# picks are typically added to S picks by scautopick but picks from other
# modules may not have this feature. When this option is active, the reference
# pick is not tested at all.
association.dropReferenceCheck = false
# Maximum allowed difference between measured and predicted arrival time for
# associating S phases to origins. Larger values allow significant correction
# of initial locations.
association.maxSResidual = 4.0
# Maximum allowed time residuals of picks after association and relocation.
# Picks will be removed at exceedance starting with largest residual and
# origins will be relocated.
association.maxResidual = 2.0
# The number of arrivals of an origin for controlling the association of more
# picks. Beyond this number, the minimum of maxPResidual or maxSResidual and
# maxResidual for associating P or S phases, respectively, is tested before
# association. Using values < 0 deactivates the feature.
# The parameter is meant to support large-N networks where the high network
# density results in very many picks and high computational load, effectively
# slowing down the quality check of picks. Setting a reasonable value, e.g.,
# >20 will only associate the best picks to origins already well constrained.
association.arrivalCountMinRes = -1
# Type of travel-time tables for phase association. May be different from
# locator.
association.tableType = LOCSAT
# Name of travel-time table used for phase association. May be different from
# locator profile. Using a different table/profile may result in increased
# runtime.
association.table = iasp91
# The locator type to be used. Use e.g. LOCSAT or Hypo71.
locator.type = LOCSAT
# The locator profile to be used. Using a different profile/table for locating
# and associating may result in increased runtime.
locator.profile = iasp91
# If enabled, the locator fixes the depth to the configured default value
# "defaultDepth" in case that all previous attempts to relocate fail. This
# option may result in many more origins. It prevents "ignoreDepth" from being
# effective if "defaultDepth" < "ignoreDepth".
locator.fixDepth = false
# If enabled, the locator is forced to fix the depth to the value configured by
# "defaultDepth". Free-depth solutions are excluded. Activating this
# option may be useful for sources with known depths or in case of sparse
# networks.
locator.forceFixDepth = false
# This parameter is only used in conjunction with
# "eventAssociation.maximumMatchingArrivalTimeDiff". If a station has multiple
# associated arrivals for a particular event, this flag defines if the time
# distance of a new pick to all arrivals must be within
# "eventAssociation.maximumMatchingArrivalTimeDiff" or if one matching arrival
# is enough.
eventAssociation.compareAllArrivalTimes = false
# Minimum number of matching picks between two origins to be associated to the
# same event.
eventAssociation.minMatchingArrivals = 3
# If this time window in seconds is negative, pickIDs are compared to find
# matching arrivals. A non negative value (including 0) compares pick times
# regardless of the pickID. Pass: |pick1.time - pick2.time| <= threshold.
eventAssociation.maxMatchingArrivalTimeDiff = -1
# Allowed difference in epicenter between an incoming origin compared with
# preferred origins to get associated.
eventAssociation.maxDist = 500.0
# Associates an origin with an existing event if the origin time differs not
# more than 60 seconds unless the minimumMatchingArrivals criteria matches.
eventAssociation.maxTimeSpan = 60.0
# Parameter "a" in the equation t = a x N + b.
# Increasing the value reduces the amount of sent origins. With the option --ep
# (playback mode) this value is set to 0.
publication.intervalTimeSlope = 0.5
# Parameter "b" in the equation t = a x N + b.
# Increasing the value reduces the amount of sent origins. With the option --ep
# (playback mode) this value is set to 0.
publication.intervalTimeIntercept = 0.0
# Integer interval to check the origin buffer for sending origins if no other
# origins have been created.
# Reducing the value may be required in EEW: it increases the load on scanloc
# but allows to send origins more rapidly.
publication.wakeUpInterval = 5
# The weight for number of used P arrival measures. Assumes only arrival time
# when exactly 1 value is set. Use a comma-separated list, wT,wSlo,wBaz, for
# giving specific weight to pick time, slowness and back azimuth, respectively.
# Example: 1.0,0.0,0.0 .
# The usage of slowness and back azimuth depends on locator. They are not
# considered by Hypo71, NonLinLoc, StdLoc and FixedHypocenter.
# Also used for Is infrasound phases when considered in clustering.
score.sum.p = 1.0
# The weight for number of added but unused P arrivals.
# Also used for Is when considered for clustering.
score.sum.p0 = 0.5
# The weight for number of used S arrival measures. Assumes only arrival time
# when exactly 1 value is set. Use a comma-separated list, wT,wSlo,wBaz, for
# giving specific weight to pick time, slowness and back azimuth, respectively.
# Example: 2.0,0.0,0.0 .
# The usage of slowness and back azimuth depends on locator. They are not
# considered by Hypo71, NonLinLoc, StdLoc and FixedHypocenter.
score.sum.s = 2.0
# The weight for number of added but unused S arrivals.
score.sum.s0 = 0.5
# Origin depth is normalized to this value for computing the score
# contribution. Shallower depths contribute to larger score.
score.sum.normalizationDepth = 650.0
# The weight of origin depth. Set this value to 0.0 for scoring origins
# independent of depth whenever shallow and deep sources are equally expected.
score.sum.depth = 1.0
# Origin RMS is normalized to this value for computing the score contribution.
# Lower RMS contribute to larger score.
score.sum.normalizationRMS = 1.5
# The weight of origin RMS residual.
score.sum.residual = 1.0
# Increase the weight for manual picks by a factor of 1.001. This gives
# preference to manual picks in case automatic ones co-exist for the same
# station.
score.sum.increaseManual = false

56
etc/defaults/scardac.cfg Normal file
View File

@ -0,0 +1,56 @@
# The URL to the waveform archive where all data is stored.
# Format: [service://]location[#type]
# "service": The type of the archive. If not given, "sds://" is implied
# assuming an SDS archive. The SDS archive structure is defined as
# YEAR/NET/STA/CHA/NET.STA.LOC.CHA.YEAR.DAYOFYEAR, e.g.
# 2018/GE/APE/BHZ.D/GE.APE..BHZ.D.2018.125
# Other archive types may be considered by plugins.
archive = @ROOTDIR@/var/lib/archive
# Number of threads scanning the archive in parallel.
threads = 1
# Acceptable deviation of end time and start time of successive records in
# multiples of sample time.
jitter = 0.5
# Maximum number of segments per stream. If the limit is reached no more
# segments are added to the database and the corresponding extent is flagged as
# too fragmented. Use a negative value to disable any limit.
maxSegments = 1000000
# Line-based text file of form NET.STA.LOC.CHA defining available stream IDs.
# Depending on the archive type, size and storage media used this file may
# offer a significant performance improvement compared to collecting the
# available streams on each startup. Filters defined under `filter.nslc` still
# apply.
#nslcFile = ""
# Start of data availability check given as date string or as number of days
# before now.
#filter.time.start =
# End of data availability check given as date string or as number of days
# before now.
#filter.time.end =
# Comma-separated list of stream IDs to process. If empty all streams are
# accepted unless an exclude filter is defined. The following wildcards are
# supported: '*' and '?'.
#filter.nslc.include =
# Comma-separated list of stream IDs to exclude from processing. Excludes
# take precedence over includes. The following wildcards are supported: '*' and
# '?'.
#filter.nslc.exclude =
# If set to true all data chunks are read independent of their mtime.
mtime.ignore = false
# Only read chunks modified after specific date given as date string or as
# number of days before now.
#mtime.start =
# Only read chunks modified before specific date given as date string or as
# number of days before now.
#mtime.end =

View File

@ -0,0 +1,84 @@
## Send to the LOCATION group
connection.primaryGroup = LOCATION
## Receive objects from PICK and AMPLITUDE groups
connection.subscriptions = PICK, AMPLITUDE
## max. permissible RMS for a location to be reported
#autoloc.maxRMS = 3.5
## max. individual residual (unweighted) for a pick to
## be used in location
#autoloc.maxResidual = 7.0
## Max. secondary azimuth gap for an origin to be reported by.
## Default is 360 degrees, i.e. no restriction based on this parameter.
#autoloc.maxSGAP = 360
## Arrivals with exceptionally large amplitudes may be
## flagged as XXL, allowing (in future) faster, preliminary
## "heads-up" alerts.
#autoloc.thresholdXXL = 10000.
#autoloc.maxStationDistance = 180
#autoloc.maxDistanceXXL = 10
#autoloc.minPhaseCount = 6
#autoloc.minPhaseCountXXL = 4
## If the station count for stations at < 105 degrees
## distance exceeds this number, no picks at > 105 degrees will be
## used in location. They will be loosely associated, though.
#autoloc.minStaCountIgnorePKP = 15
## Clean-up interval for removing old/unused objects, in seconds
## Don't change.
#autoloc.cleanupInterval = 3600
## max. age for objects kept in memory, in seconds
## Default is 6 hours - don't change.
#autoloc.maxAge = 21600
## Don't change.
#autoloc.wakeupInterval = 5
## Grid configuration
#autoloc.grid = @DATADIR@/scautoloc/grid.conf
## Station configuration
#autoloc.stationConfig = @DATADIR@/scautoloc/station.conf
## This is only relevant in offline/testing mode
#locator.stationLocations = @DATADIR@/scautoloc/station-locations.conf
## Manual picks/origins can be fed back into autoloc for two purposes:
## * passive association to a solution from a "trusted" source so that we
## avoid fake or wrong locations due to events outside our area of interest
## * use the manual origins in further processing, especially the manual picks.
## Possibly also honor an operator specified fixed depth.
## Currently we only permit use of manual picks which are then used
## instead of the corresponding automatic picks (if existing)
# autoloc.useManualPicks = false
## Log all picks received by scautoloc to this file
autoloc.pickLog = @LOGDIR@/autoloc-picklog
# Amplitude type to be used as SNR amplitude
# Don't change unless you know exactly what you are doing.
autoloc.amplTypeSNR = snr
# Amplitude type to be used as absolute amplitude
# Don't change unless you know exactly what you are doing.
autoloc.amplTypeAbs = mb
# Use manual origins from our own agency. Essentially it means to
# use manual picks from manual origins, which is assumed to be
# better than using only automatic picks.
autoloc.useManualOrigins = false
# NOTE: If you set the above to true, then make sure to add the
# LOCATION group to connection.subscriptions!
# If autoloc.useManualOrigins is true, adopt the depth from manual
# origins, which is especially important if it was fixed by the analyst.
autoloc.adoptManualDepth = false

View File

@ -0,0 +1,87 @@
# Send to the PICK group
connection.primaryGroup = PICK
# Send amplitudes to this group
connection.amplitudeGroup = AMPLITUDE
# Receive objects from CONFIG group
connection.subscriptions = CONFIG
# The filter used to trigger
filter = "RMHP(10)>>ITAPER(30)>>BW(4,0.7,2)>>STALTA(2,80)"
# The time correction applied to a detected pick
timeCorrection = -0.8
# The record ringbuffer size in seconds
ringBufferSize = 300
# The leadTime defines the time in seconds to
# start picking on the streams before current
# time
leadTime = 60
# The initTime defines a timespan in seconds
# for that the picker is blind after initialization
# This time is needed to initialize the filter and
# depends on it
initTime = 60
# Interpolate gaps linearly? This is valid for gaps
# shorter than thresholds.maxGapLength
gapInterpolation = false
# For which value on a filtered stream is
# a pick detected
thresholds.triggerOn = 3
# The value the filtered stream must reach to
# enable detection again
thresholds.triggerOff = 1.5
# The maximum gap length to handle. Gaps larger
# than this size reset the picker
thresholds.maxGapLength = 4.5
# The timeWindow used to compute a maximum (snr)
# amplitude on the filtered stream
thresholds.amplMaxTimeWindow = 10
thresholds.deadTime = 30
thresholds.minAmplOffset = 3
# The amplitudes to compute triggered by
# a new P Pick continuously without having
# an Origin
amplitudes = MLv, mb, mB
# Configures the picker to use. By default only simple
# STALTA detections are emitted as picks. To enable "repicking"
# define a picker algorithm here.
picker = ""
# Configures the secondary picker to be used.
spicker = ""
# Configures the feature extraction type to be used
fx = ""
# If enabled, all streams are used for picking that are received by the
# picker. This option has only effect if a file is used as input which contains
# more data than the picker requests or if amplitudes are enabled which are using
# the horizontal components.
useAllStreams = false
# If enabled, all secondary pickers that were triggered by a previous pick
# will be terminated when a new detection or pick has been found. This aims to
# avoid the case where an S phase is wrongly picked as P but would also be
# picked as S by the secondary picker. But suppressing the S pick can lead to
# undesired results. It might be better in some situations to have two picks
# (P and S) instead of only a wrong P.
killPendingSPickers = true
# If enabled and a picker is configured then detections are sent as well.
# To distinguish between detections and picks the evaluation mode of the pick
# is set to manual. This is meant to be a debug option which can be used to
# compare detections and picks by their evaluation mode.
sendDetections = false

2
etc/defaults/scesv.cfg Normal file
View File

@ -0,0 +1,2 @@
# Messaging subscriptions
connection.subscriptions = EVENT, MAGNITUDE, LOCATION, FOCMECH

203
etc/defaults/scevent.cfg Normal file
View File

@ -0,0 +1,203 @@
# Send to the EVENT group
connection.primaryGroup = EVENT
# Receive objects from LOCATION, MAGNITUDE and FOCMECH group
connection.subscriptions = LOCATION, MAGNITUDE, FOCMECH, EVENT
# A magnitude needs at least 4 station magnitudes
# to become preferred
eventAssociation.minimumMagnitudes = 4
# An automatic origin will be associated to an
# event when it has at least 10 phases
eventAssociation.minimumDefiningPhases = 10
# Minimum score of an automatic origin to be allowed to
# form a new Event. This requires an activated score
# plugin. See parameter score.
# If set the minimumDefiningPhases has no effect at all as
# this check will be superseded by the score check. It is
# the task of the score processor to evaluate a proper
# score for all input origins.
# By default this option is deactivated.
#eventAssociation.minimumScore = 1
# An automatic origin will be associated to an
# event when it falls inside this region.
# Format: min-lat, min-lon, max-lat, max-lon
#eventAssociation.region.rect = -90,-180,90,180
# Search 1800 seconds BEFORE origin time of a
# new location for matching events
eventAssociation.eventTimeBefore = 1800
# Search 1800 seconds AFTER origin time of a
# new location for matching events
eventAssociation.eventTimeAfter = 1800
# An origin will be associated to an existing
# event when at least 3 picks matches with
# former associated origins
eventAssociation.minimumMatchingArrivals = 3
# If this time window in seconds is negative, pickIDs
# are compared to find matching arrivals. A non negative
# value (including 0) compares pick times regardless
# of the pickID.
# Pass: |pick1.time - pick2.time| <= threshold
eventAssociation.maximumMatchingArrivalTimeDiff = -1
# This parameter is only used in conjunction with
# eventAssociation.maximumMatchingArrivalTimeDiff. If a station
# has multiple associated arrivals for a particular event, this
# flag defines if the time distance of a new pick to all arrivals
# must be within eventAssociation.maximumMatchingArrivalTimeDiff
# or if one matching arrival is enough.
eventAssociation.compareAllArrivalTimes = true
# Associates an origin with an existing event
# if the origin time differs not more
# than 60 seconds unless the minimumMatchingArrivals
# criteria matches.
eventAssociation.maximumTimeSpan = 60
# Associates an origin to an existing event
# when the location differs not more
# than 5 degrees unless the minimumMatchingArrivals
# criteria matches
eventAssociation.maximumDistance = 5
# Minimum number of station magnitudes required for Mw(mB) to be considered as
# preferred magnitude.
eventAssociation.minMwCount = 8
# If false then the station count rules out the magnitude priority
# which is only taken into account if two magnitudes have the
# same station count.
#
# If true then the priority rules out the station count
# which is only taken into account if two magnitudes have the
# same priority.
eventAssociation.magPriorityOverStationCount = false
# Minimum number of station magnitudes which ensures that Mw(mB) will be
# preferred and not mb.
eventAssociation.mbOverMwCount = 30
# Average between mb and Mw(mB) which must be exceeded to become Mw(mB)
# preferred.
eventAssociation.mbOverMwValue = 6
# The magnitude type priority list
# Magnitudes with other types cannot become
# preferred magnitudes
eventAssociation.magTypes = M
# The agencyID priority list
# When the eventtool comes to the point to select a preferred
# origin it orders all origins by its
# agency priority and selects then the best one among the
# highest priority agency.
# It also defines the agency priority for custom priority
# checks (eventAssociation.priorities)
#eventAssociation.agencies = GFZ
# The author priority list
# When the eventtool comes to the point to select a preferred
# origin it orders all origins by its
# author priority and selects then the best one among the
# highest priority author.
# It also defines the author priority for custom priority
# checks (eventAssociation.priorities)
#eventAssociation.authors = scautoloc@localhost
# The general priority list to decide if an origin becomes preferred. The
# priority decreases in the order of the parameters. This list is not used
# unless this parameter is activated.
# Empty priority list: scevent replicates the default hard wired behaviour:
# AGENCY, STATUS, PHASES_AUTOMATIC, TIME_AUTOMATIC
# Each item in the list corresponds to a check that is performed. Each check
# computes a score of the incoming origin (s1) and the current preferred origin
# (s2). If the s1 is lower than s2, the incoming origin is rejected and does
# not become preferred. All subsequent checks are ignored. If s1 is equal to
# s2, the next check in the list is performed. If s1 is larger than s2, the
# origin becomes preferred and all subsequent checks are ignored.
# Available tokens:
# AGENCY: check based on agency priorities
# AUTHOR: check based on author priorities
# MODE: evaluation mode priority: 0 = unset, 1 = automatic, 2 = manual, manual
# over-rules automatic
# STATUS: priority combined from evaluation status and evaluation mode: -100 =
# status is rejected, -1 = status is reported, 0 = status is preliminary or
# status is unset and mode is automatic, 1 = status is confirmed or status is
# unset and mode is manual, 2 = status is reviewed, 3 = status is final,
# METHOD: check based on the method priorities
# PHASES: higher phase count = higher priority
# PHASES_AUTOMATIC: only checks phase priorities for incoming automatic origins
# RMS: lower rms = higher priority
# RMS_AUTOMATIC: only check RMS on incoming automatic origins
# TIME: more recent origins (creationTime) have higher priorities
# TIME_AUTOMATIC: only check creationTime priority on incoming automatic
# origins
# SCORE: evaluates the score according to a configured ScoreProcessor and
# prefers the origin/focalmechanism with the highest score
#eventAssociation.priorities = AGENCY, STATUS, PHASES_AUTOMATIC, TIME_AUTOMATIC
# If true, one magnitude will be preferred even if magnitude criteria are
# not fulfilled.
eventAssociation.enableFallbackMagnitude = false
# The eventID prefix
# The eventID format is [prefix][year][code], e.g. gfz2008fdvg
eventIDPrefix = "gfz"
# Defines the pattern to generate an event ID.
# %p : prefix
# %Y : year
# %[w]c: alpha character
# %[w]C: upper case alpha character
# %[w]d: decimal
# %[w]x: hexadecimal
# %[w]X: upper case hexadecimal
eventIDPattern = "%p%Y%04c"
# Configures the number of event ID slots to look back and forth when an event
# ID is already taken. The default in previous versions was 5. Now -1 means
# that the margin is determined automatically based on
# "eventAssociation.eventTimeBefore" and "eventAssociation.eventTimeAfter".
# According to the configured "eventIDPattern" a fixed time range per slot can
# be computed and with that width the number of look ahead slots and look back
# slots can be computed based on the given time ranges for event association.
eventIDLookupMargin = -1
# Configures a timespan in seconds to delay origin association
#eventAssociation.delayTimeSpan = 0
# AgencyID filter used to delay origin association if
# eventAssociation.delayTimeSpan > 0
#eventAssociation.delayFilter.agencyID = agency
# Author filter used to delay origin association if
# eventAssociation.delayTimeSpan > 0
#eventAssociation.delayFilter.author = author
# evaluationMode filter used to delay origin association if
# eventAssociation.delayTimeSpan > 0. Allowed values are "manual" or "automatic"
#eventAssociation.delayFilter.evaluationMode = automatic
# Defines whether to associate or to ignore origins derived from CMT/MT
# inversions.
eventAssociation.ignoreFMDerivedOrigins = true
# If the preferred origin has evaluation status 'rejected' the event type will
# be set as 'not existing' unless the event type has been fixed by an operator
# or the preferred origin has been fixed.
eventAssociation.declareFakeEventForRejectedOrigin = false
# Allows to match picks that are associated with weight 0
eventAssociation.allowLooseAssociatedArrivals = false
# If enabled then the EventDescription with type 'Flinn-Engdahl region'
# will be populated with the Flinn-Engdahl region name.
populateFERegion = false

13
etc/defaults/scevtlog.cfg Normal file
View File

@ -0,0 +1,13 @@
connection.username = scevtlog
connection.subscriptions = EVENT, LOCATION, MAGNITUDE, PICK, AMPLITUDE
# The output directory
directory = @LOGDIR@/events
# The format to use to log events.
# Possible formats are: autoloc1, autoloc3 and xml.
# For compatibility reasons autoloc3 is the default
# format, but it is recommended to use xml, because
# it can be converted in autoloc1 and autoloc3 using
# scbulletin.
format = xml

75
etc/defaults/scheli.cfg Normal file
View File

@ -0,0 +1,75 @@
# List of stream codes to be plotted (net.sta.loc.cha). If not in capture mode
# only the first stream is shown. When using a list, the first entry is
# considered. Use commas for separating streams.
# Example: GR.MOX..BHZ
#heli.streams = GR.MOX..BHZ
# Filter to be applied on the data.
#heli.filter = BW(3,0.7,2.0)
# Number of rows (traces) shown in the helicorder plot.
heli.numberOfRows = 48
# Length of data per trace.
heli.rowTimeSpan = 1800
# The time format used to print the start and end time of the whole plot (upper
# right corner). The format specification is the one used in the strftime
# function (man strftime).
heli.timeFormat = %F
# Sets current time to last data sample
heli.recordTime = false
# Line width of traces.
heli.lineWidth = 1
# A list of alternating row colors cycled through for painting traces.
heli.colors = FF0000, 0000FF
# Use anti aliasing to plot the traces. The default uses the settings from
# scheme.records.antiAliasing
heli.antialiasing = false
# Add stream description to traces.
heli.stream.description = true
# Define the method to scale traces within rows. Possible values are:
# minmax: Scale all rows to configured minimum and maximum amplitudes
# configured by amplitudeRange.min and amplitudeRange.max.
# row: Scale each row to the maximum within this row.
heli.amplitudeRange.scaling = minmax
# Minimum amplitude to show in trace. Requires amplitudeRange.scaling = "minmax".
heli.amplitudeRange.min = -0.00001
# Maximum amplitude to show in trace. Requires amplitudeRange.scaling = "minmax".
heli.amplitudeRange.max = +0.00001
# Image creation interval. Negative values disable image dumping. If enabled,
# images are generated at the configured interval.
heli.dump.interval = 0
# Name of output file. The filename can contain placeholders that are replaced
# by the corresponding streamID parts:
# %N : network code
# %S : station code
# %L : location code
# %C : channel code
# Placeholders are important if more than one stream is given and capture mode
# is active.
heli.dump.outputFile = /tmp/heli_%N_%S_%L_%C.png
# Image resolution
heli.dump.dpi = 300
# Number of pixels horizontally
heli.dump.xres = 1024
# Number of pixels vertically
heli.dump.yres = 768
# Defines the path to a script that is called whenever an image has been
# captured and written to disc. The only parameter is the path to the generated
# image.
scripts.postprocessing = ""

1
etc/defaults/scm.cfg Normal file
View File

@ -0,0 +1 @@
plugins = ${plugins}, mncursesplugin

57
etc/defaults/scmag.cfg Normal file
View File

@ -0,0 +1,57 @@
# Send to the MAGNITUDE group
connection.primaryGroup = MAGNITUDE
# Receive objects from PICK, AMPLITUDE and LOCATION group
connection.subscriptions = PICK, AMPLITUDE, LOCATION
# Interval between 2 sending processes. The interval has influence how often
# information is updated.
sendInterval = 1
# The minimum weight of an arrival to be used for magnitude
# calculations.
minimumArrivalWeight = 0.5
# Defines the types of magnitudes to calculate.
# A magnitude of a given type is going to be calculated
# only when a corresponding amplitude exists. Check
# the amplitudes calculated by scautopick and scamp also.
magnitudes = MLv, mb, mB, Mwp
# Defines the average method to use when computing
# the network magnitude. To define the average method
# per magnitude type append the type, eg:
# magnitudes.average = default, MLv:median
# The default behaviour is to compute the mean if less
# than 4 contributed station magnitudes exist otherwise
# a trimmed mean of 25 percent is used.
magnitudes.average = default
# Enable/disable calculation of a summary magnitude
summaryMagnitude.enabled = true
# This is the minimum station magnitude required for any
# magnitude to contribute to the summary magnitude at all. If
# this is set to 4 then no magnitude with less than 4 station
# magnitudes is taken into consideration even if this results
# in no summary magnitude at all. For this reason, the
# default here is 1 but in a purely automatic system it should
# be higher, at least 4 is recommended.
summaryMagnitude.minStationCount = 1
# Define the type of the summary magnitude
summaryMagnitude.type = M
# Define the coefficients to calculate the weight
# of a magnitude:
# weight = a*magStationCount+b
# Unnamed values define the default values
summaryMagnitude.coefficients.a = 0, Mw(mB):0.4, Mw(Mwp):0.4
summaryMagnitude.coefficients.b = 1, MLv:2, Mw(mB):-1, Mw(Mwp):-1
# Define the magnitudes to include into the summary magnitude
# calculation
#summaryMagnitude.whitelist = ""
summaryMagnitude.blacklist = mB, Mwp
summaryMagnitude.singleton = true

100
etc/defaults/scmaster.cfg Normal file
View File

@ -0,0 +1,100 @@
# The available queues.
queues = production, playback
# The default group set
defaultGroups = AMPLITUDE, \
PICK, \
LOCATION, \
MAGNITUDE, \
FOCMECH, \
EVENT, \
QC, \
PUBLICATION, \
GUI, \
INVENTORY, \
ROUTING, \
CONFIG, \
LOGGING, \
IMPORT_GROUP, \
SERVICE_REQUEST, \
SERVICE_PROVIDE
interface {
bind = 0.0.0.0:18180
# List of IP masks which are allowed to access
#acl = 0.0.0.0/0
ssl {
bind = 0.0.0.0:-1 # Disabled by default
# List of IP masks which are allowed to access
#acl = 0.0.0.0/0
# The private server key. This key will not be shared with clients and
# must be kept secret.
key = @CONFIGDIR@/ssl/key.pem
# The server certificate shared with clients.
certificate = @CONFIGDIR@/ssl/cert.pem
}
}
queues {
production {
# Grant access to all connections
acl = 0.0.0.0/0
# The plugins loaded and executed for this particular queue.
# The execution order is exactly the same order as given here.
plugins = dbstore
processors {
messages = dbstore
# Configure the dbstore processor
messages {
dbstore {
# Select the database driver. This all depends on the
# loaded plugins.
driver = mysql
# Defines the read connection sent to the clients
read = sysop:sysop@localhost/seiscomp
# Defines the write connection for the plugin. This line
# will not be published and only be used internally.
write = sysop:sysop@localhost/seiscomp
# If enabled, the plugin will check the database schema
# version and refuse to start if the version doesn't match
                    # the latest version. If disabled and an object needs
# to be stored which is incompatible with the database
# schema this object is lost. Leave this option enabled
                    # unless you know exactly what you are doing and what
# the consequences are.
strictVersionMatch = true
}
}
}
}
playback {
# Grant access only to localhost
acl = 127.0.0.1
}
}
http {
# The directory served by the http server at staticPath
filebase = @DATADIR@/scmaster/http/
# The URL path at which html files and assets are available.
# All files under filebase will be served at this URL path.
staticPath = /
# The URL path at which the broker websocket is available.
brokerPath = /
}

13
etc/defaults/scmv.cfg Normal file
View File

@ -0,0 +1,13 @@
# Enable maps legends
scheme.map.showLegends = true
# Messaging subscriptions
connection.subscriptions = AMPLITUDE, PICK, EVENT, LOCATION, MAGNITUDE, QC, CONFIG
# Sets the location of the map symbol legend (QC, ground motion).
# Use either: topleft, topright, bottomright or bottomleft.
mapLegendPosition = topleft
# Sets the location of the event symbol legend. Use either:
# topleft, topright, bottomright or bottomleft.
eventLegendPosition = bottomleft

69
etc/defaults/scolv.cfg Normal file
View File

@ -0,0 +1,69 @@
# Messaging subscriptions
connection.subscriptions = EVENT, LOCATION, FOCMECH, MAGNITUDE, PICK, CONFIG, GUI
# Load initially events of 1 day from
# database
loadEventDB = 1.0
# Define favourite pick phases
# These phases go into the "Picking" menu as
# top-level items
picker.phases.favourites = P, Pn, Pg, pP, S, Sg, sP
# Defines the phases (additionally to the pick phases)
# for which theoretical arrival times are computed and
# which are plotted into the trace
picker.showPhases = P, Pn, Pg, pP, sP, S, Sg
# When loading the traces of an event (origin) are
# all picks (not only the associated ones) going to be
# loaded within that timewindow from the database
picker.loadAllPicks = false
# Load all components (Z,N,E) when opening the picker
# per default
picker.loadAllComponents = false
# Magnitudes to compute manually by default
magnitudes = MLv, mb, mB, Mwp
# A list of magnitude types to be displayed in the summary widget (F8).
visibleMagnitudes = M, ML, MLv, mb, mB, Mwp, Mjma, Ms_20, Ms(BB)
# Default visible column set of arrival table. The
# order of the table columns is fixed and will not
# reflect the order given here.
# Possible values are:
# * Used
# * Status
# * Phase
# * Weight
# * Method
# * Polarity
# * Net
# * Sta
# * Loc/Cha
# * Timeres
# * Dis
# * Az
# * Time
# * +/-
# * Slo
# * Slores
# * Baz
# * Bazres
# * Created
# * Latency
olv.arrivalTable.visibleColumns = Used, Status, Phase, Net, Sta, Loc/Cha, Timeres, Dis, Az, Time, +/-
# enable/disable advanced options (magnitude parameters) for artificial
# origin creations
olv.artificialOriginAdvanced = false
# If a locator does not populate the take off angle in its arrivals
# the first motion plot will not show picked polarities. This option
# defines whether to compute take off angles that are not present
# in the arrivals or not.
olv.computeMissingTakeOffAngles = true
olv.systemTray = true

120
etc/defaults/scqc.cfg Normal file
View File

@ -0,0 +1,120 @@
#
# *** QcTool default configuration file ***
#
# Place a copy with your own modifications
# in ~/.seiscomp
#
# Send to the QC group
connection.primaryGroup = QC
# Receive objects from CONFIG group
connection.subscriptions = CONFIG
# ID of the creator
CreatorId="smi://de.gfz-potsdam/QcTool_0.3.1"
# use only configured streams (trunk/key/station_*) (z-component) (True/False)
# --> the same streams as e.g. scautopick works on
useConfiguredStreams = true
# If useConfiguredStreams is true then this parameter decides whether to use
# only the vertical component (default) or all three components.
# The 3 components are collected from the inventory according to channel
# orientation. If that is not possible then the fixed components N and E will
# be used.
use3Components = false
# if useConfiguredStreams == False then
# load (from inventory) only those streams, matching the streamMask
# RegEx e.g. "^(NET1|NET2)\.(STA1|STA2|STA3)\.(LOC)\.((BH)|(LH)|(HH))Z$"
# RegEx e.g. "^(.+)\.(.+)\.(.*)\.(.+)Z$"
streamMask = "^(.+)\.(.+)\.(.*)\.(BHZ)$"
# Database look up for past entries not older than x days
# (to determine the last QC parameter calculated)
# [days]
dbLookBack = 7
# currently implemented QcPlugins:
# QcDelay, QcLatency, QcTiming, QcRms, QcOffset, QcGap, QcSpike, QcOutage
#
# Load this plugins for calculating Qc Parameters
plugins = qcplugin_delay, \
qcplugin_latency, \
qcplugin_timing, \
qcplugin_rms, \
qcplugin_offset, \
qcplugin_gap, \
qcplugin_overlap, \
qcplugin_availability, \
qcplugin_spike, \
qcplugin_outage
# QcPlugin DEFAULT configuration
#
# Use this plugin only for realtime processing [True].
# Default [False] means, plugin is able to
# process archived data AND realtime data streams.
plugins.default.realTimeOnly = False
#
# Qc-Buffer Length [s] (aka LTA-Buffer)
# Must be >= plugins.*.bufferLength
plugins.default.buffer = 4000
plugins.default.filter = ""
#
# A R C H I V E
# Interval for sending archive messages [s]
# ... which will finally end up in the database
# so be careful not to set it too small!
# If set to -1, nothing gets written into the database
plugins.default.archive.interval = -1
# Archive Buffer length [s]
plugins.default.archive.buffer = 3600
#
# R E P O R T
# Interval for sending report messages [s]
# ... which e.g. may be displayed by scqcv (QcView)
plugins.default.report.interval = 60
# Report Buffer length [s]
plugins.default.report.buffer = 600
# Report messages are generated in case of no data is received since timeout seconds [s]
# (only in realtime processing mode)
plugins.default.report.timeout = 0
#
# A L E R T
# (only available in realtime processing mode)
# !!! STILL EXPERIMENTAL !!!
# Interval for checking alert thresholds [s]
# A value of -1 disables threshold checking.
plugins.default.alert.interval = -1
# Alert Buffer length [s] (aka STA-Buffer)
plugins.default.alert.buffer = 1800
# Alert threshold in percent [%], single value. [list: 25,50,75 ... not yet implemented]
plugins.default.alert.thresholds = 150
# QcPlugin SPECIFIC configuration
plugins.QcLatency.report.buffer = 60
plugins.QcLatency.report.timeout = 60
plugins.QcLatency.realTimeOnly = True
#
plugins.QcDelay.report.buffer = 60
plugins.QcDelay.report.timeout = 60
plugins.QcDelay.realTimeOnly = True
#
plugins.QcAvailability.report.timeout = 60
#
plugins.QcRms.report.buffer = 3600
plugins.QcRms.report.timeout = 0
plugins.QcRms.realTimeOnly = True
#
# If there is a gap of more than x [s],
# write an OUTAGE entry into the database.
plugins.QcOutage.notifyDB = 1800

445
etc/defaults/scqcv.cfg Normal file
View File

@ -0,0 +1,445 @@
# Defines the primary group of a module. This is the name of the group where a
# module sends its messages to if the target group is not explicitly given in
# the send call.
connection.primaryGroup = QC
# Defines a list of message groups to subscribe to. The default is usually
# given by the application and does not need to be changed.
connection.subscriptions = QC, CONFIG
# List of QC parameters to be displayed in the details table. Read the scqc
# documentation for a list of available QC parameters and the default
# configuration of scqcv $SEISCOMP_ROOT/etc/defaults/scqcv.cfg for more
# examples.
# Format: "Parameter name : ConfigName"
# Example: "delay : delay","spikes count : spike","spikes amplitude :
# spikeAmplitude" Refer to the parameters by their ConfigName to configure the
# attributes.
parameter = "latency : latency",\
"delay : delay",\
"timing quality : timing",\
"offset : offset",\
"rms : rms",\
"gaps count : gap",\
"overlaps count : overlap",\
"availability : availability",\
"spikes count : spike"
# "gaps interval : gapInterval",\
# "gaps length : gapLength",\
# "spikes interval : spikeInterval",\
# "spikes amplitude : spikeAmplitude"
# "overlaps interval : overlapInterval",\
# "overlaps length : overlapLength"
# List of channels to display. By default the global binding configuration is
# used which can be overwritten with this parameter.
streams.codes = default
# Add new streams automatically to the streams configured in streams.codes when
# waveform QC parameters are provided for these streams.
streams.cumulative = false
# Length of data to be displayed.
streamWidget.length = 600
# Names of range profiles to be considered. The range profiles define the
# background color table fields depending on the field value. Add the default
# ranges for which different intervals and the color are configured.
default.ranges = sane, bad
# Values are: int, float, percent, timeSpan
# Displays raw values if unset.
default.format = float
# Default time in seconds, how long a value is displayed in scqcv if no update
# is received. Setting to 0 means, show value until updated.
default.expire = 0
# A color defined by the color definitions below.
default.color = grey1
# Activate to display absolute values (modulus).
default.useAbsoluteValue = false
# Values are: int, float, percent, timeSpan
# Displays raw values if unset.
availability.format = percent
# Default time in seconds, how long a value is displayed in scqcv if no update
# is received. Setting to 0 means, show value until updated.
availability.expire = 600
# Value interval for range sane
availability.range.sane = 99.0, 101.0
# Contribution of this range for computing the score. The range must be
# considered.
availability.range.sane.count = 0
# Names of range profiles to be considered. The range profiles define the
# background color table fields depending on the field value. Add the default
# ranges for which different intervals and the color are configured.
delay.ranges = sane, inter, bad
# Values are: int, float, percent, timeSpan
# Displays raw values if unset.
delay.format = timeSpan
# Value interval for range sane
delay.range.sane = 0.0, 60.0
# Value interval for range inter
delay.range.inter = 60, 120
# Contribution of this range for computing the score. The range must be
# considered.
delay.range.bad.count = -500
# A color defined by the color definitions below.
delay.range.bad.color = red
# Contribution of this range for computing the score. The range must be
# considered.
delay.range.inter.count = -1
# A color defined by the color definitions below.
delay.range.inter.color = yellow
# Contribution of this range for computing the score. The range must be
# considered.
delay.range.sane.count = 0
# A color defined by the color definitions below.
delay.range.sane.color = green
# Default time in seconds, how long a value is displayed in scqcv if no update
# is received. Setting to 0 means, show value until updated.
gap.expire = 600
# Value interval for range sane
gap.range.sane = 0.0, 0.0
# Contribution of this range for computing the score. The range must be
# considered.
gap.range.sane.count = 0
# Values are: int, float, percent, timeSpan
# Displays raw values if unset.
latency.format = timeSpan
# Value interval for range sane
latency.range.sane = 0.0, 600.0
# Contribution of this range for computing the score. The range must be
# considered.
latency.range.bad.count = -1000
# A color defined by the color definitions below.
latency.range.bad.color = grey
# Contribution of this range for computing the score. The range must be
# considered.
latency.range.sane.count = 0
# Activate to display absolute values (modulus).
offset.useAbsoluteValue = true
# Value interval for range sane
offset.range.sane = 0.0, 500.E3
# Contribution of this range for computing the score. The range must be
# considered.
offset.range.sane.count = 0
# Value interval for range sane
rms.range.sane = 10.0, 20.E3
# Contribution of this range for computing the score. The range must be
# considered.
rms.range.sane.count = 0
# A color defined by the color definitions below.
rms.range.sane.color = green
# Default time in seconds, how long a value is displayed in scqcv if no update
# is received. Setting to 0 means, show value until updated.
spike.expire = 600
# Value interval for range sane
spike.range.sane = 0.0, 0.0
# Contribution of this range for computing the score. The range must be
# considered.
spike.range.sane.count = 0
# Names of range profiles to be considered. The range profiles define the
# background color table fields depending on the field value. Add the default
# ranges for which different intervals and the color are configured.
timing.ranges = sane, inter, bad
# Values are: int, float, percent, timeSpan
# Displays raw values if unset.
timing.format = int
# Activate to display absolute values (modulus).
timing.useAbsoluteValue = false
# Value interval for range sane
timing.range.sane = 90.0, 100.0
# Value interval for range inter
timing.range.inter = 50.0, 90.0
# Contribution of this range for computing the score. The range must be
# considered.
timing.range.bad.count = -100
# A color defined by the color definitions below.
timing.range.bad.color = darkred
# Contribution of this range for computing the score. The range must be
# considered.
timing.range.inter.count = -1
# A color defined by the color definitions below.
timing.range.inter.color = yellow
# Contribution of this range for computing the score. The range must be
# considered.
timing.range.sane.count = 0
# A color defined by the color definitions below.
timing.range.sane.color = green
#
range.bad = -99.9E99, 99.0E99
# Contribution of this range for computing the score. The range must be
# considered.
range.bad.count = -1
# A color defined by the color definitions below.
range.bad.color = red
# Contribution of this range for computing the score. The range must be
# considered.
range.inter.count = 0
# A color defined by the color definitions below.
range.inter.color = yellow
# Value interval for range sane
range.sane = 0.0, 99.0E99
# Contribution of this range for computing the score. The range must be
# considered.
range.sane.count = 0
# A color defined by the color definitions below.
range.sane.color = green
###############################################################################
# score values for overview window
# Ranges to be considered for forming the score. Set the count parameter from the
# respective range section for controlling the contribution of this range.
score.default.ranges = latency,\
delay,\
timing,\
below,\
r10,\
r09,\
r08,\
r07,\
r06,\
r05,\
r04,\
r03,\
r02,\
r01,\
above,\
unset
#
range.above = 0, 999
# A color defined by the color definitions below.
range.above.color = green
# Possible values: enableStream, disableStream
range.above.action = enableStream
#
range.below = -99, -11
# Contribution of this range for computing the score. The range must be
# considered.
range.below.count = 0
# A color defined by the color definitions below.
range.below.color = grey
# Possible values: enableStream, disableStream
range.below.action = disableStream
#
range.timing = -200, -100
# Contribution of this range for computing the score. The range must be
# considered.
range.timing.count = 0
# A color defined by the color definitions below.
range.timing.color = darkred
#
range.delay = -600, -500
# Contribution of this range for computing the score. The range must be
# considered.
range.delay.count = 0
# A color defined by the color definitions below.
range.delay.color = darkred
# define cumulative status ranges
range.latency = -1900, -1000
# Contribution of this range for computing the score. The range must be
# considered.
range.latency.count = 0
# A color defined by the color definitions below.
range.latency.color = grey
#
range.r01 = -1, -1
# Contribution of this range for computing the score. The range must be
# considered.
range.r01.count = 0
# A color defined by the color definitions below.
range.r01.color = red01
#
range.r02 = -2, -2
# Contribution of this range for computing the score. The range must be
# considered.
range.r02.count = 0
# A color defined by the color definitions below.
range.r02.color = red02
#
range.r03 = -3, -3
# Contribution of this range for computing the score. The range must be
# considered.
range.r03.count = 0
# A color defined by the color definitions below.
range.r03.color = red03
#
range.r04 = -4, -4
# Contribution of this range for computing the score. The range must be
# considered.
range.r04.count = 0
# A color defined by the color definitions below.
range.r04.color = red04
#
range.r05 = -5, -5
# Contribution of this range for computing the score. The range must be
# considered.
range.r05.count = 0
# A color defined by the color definitions below.
range.r05.color = red05
#
range.r06 = -6, -6
# Contribution of this range for computing the score. The range must be
# considered.
range.r06.count = 0
# A color defined by the color definitions below.
range.r06.color = red06
#
range.r07 = -7, -7
# Contribution of this range for computing the score. The range must be
# considered.
range.r07.count = 0
# A color defined by the color definitions below.
range.r07.color = red07
#
range.r08 = -8, -8
# Contribution of this range for computing the score. The range must be
# considered.
range.r08.count = 0
# A color defined by the color definitions below.
range.r08.color = red08
#
range.r09 = -9, -9
# Contribution of this range for computing the score. The range must be
# considered.
range.r09.count = 0
# A color defined by the color definitions below.
range.r09.color = red09
#
range.r10 = -10, -10
# Contribution of this range for computing the score. The range must be
# considered.
range.r10.count = 0
# A color defined by the color definitions below.
range.r10.color = red10
#
range.unset = 1000, 1000
# Contribution of this range for computing the score. The range must be
# considered.
range.unset.count = 0
# A color defined by the color definitions below.
range.unset.color = grey1
# R, G, B, alpha colors
color.red = 255, 000, 000, 64
color.yellow = 255, 255, 000, 64
color.green = 000, 255, 000, 100
color.blue = 000, 000, 255, 100
color.grey = 130, 130, 130, 255
color.grey1 = 240, 240, 240, 255
color.darkred = 255, 000, 000, 196
#
color.red10 = 255, 000, 000, 255
color.red09 = 255, 000, 000, 240
color.red08 = 255, 000, 000, 220
color.red07 = 255, 000, 000, 200
color.red06 = 255, 000, 000, 175
color.red05 = 255, 000, 000, 150
color.red04 = 255, 000, 000, 125
color.red03 = 255, 000, 000, 100
color.red02 = 255, 000, 000, 85
color.red01 = 255, 000, 000, 70
# valid formats are:
format.float = 2

8
etc/defaults/screloc.cfg Normal file
View File

@ -0,0 +1,8 @@
# Defines the primary group of a module. This is the name of the group where a
# module sends its messages to if the target group is not explicitly given in
# the send call.
connection.primaryGroup = LOCATION
# Defines a list of message groups to subscribe to. The default is usually
# given by the application and does not need to be changed.
connection.subscriptions = PICK, LOCATION

69
etc/defaults/scrttv.cfg Normal file
View File

@ -0,0 +1,69 @@
# Defines the primary group of a module. This is the name of the group where a
# module sends its messages to if the target group is not explicitly given in
# the send call.
connection.primaryGroup = GUI
# Defines a list of message groups to subscribe to. The default is usually
# given by the application and does not need to be changed.
connection.subscriptions = PICK, EVENT, LOCATION, GUI, CONFIG
# If greater than 0 then all traces for which the data latency is higher than
# this value are hidden.
maxDelay = 0
# If enabled then all traces are sorted by distance when a new origin arrives.
resortAutomatically = true
# If enabled, picks are shown.
showPicks = true
# Defines the filters to be used when filtering is activated.
filters = "RMHP(2)>>ITAPER(5)>>BW(3, 0.5, 8.0)","RMHP(2)>>ITAPER(5)>>BW_HP(3, 3)"
# Activates the first filter of the configured filter list after startup. This
# is equivalent to pressing 'f'.
autoApplyFilter = false
# Defines the buffer size in seconds of the ring buffer of each trace.
bufferSize = 1800
# If set to true all traces will be visible on application startup independent
# of data availability.
allTracesInitiallyVisible = false
# Time span in seconds to switch back to the last view after an origin caused
# resorting. The default is 15min.
autoResetDelay = 900
# Defines a list of channels codes to be displayed. List items may contain
# wildcards at any position and are separated by comma. The list is intersected
# with all channels configured in inventory.
# Examples:
# default : display all streams configured by global bindings
# default, PF.BON.00.HH? : display default and all HH streams of PF.BON.00
streams.codes = default
# Latitude of the initial location for sorting traces.
streams.sort.latitude = 0.0
# Longitude of the initial location for sorting traces.
streams.sort.longitude = 0.0
# The sort mode applied initially
streams.sort.mode = distance
# Minimum longitude.
streams.region.lonmin = -180.0
# Maximum longitude.
streams.region.lonmax = 180.0
# Minimum latitude.
streams.region.latmin = -90.0
# Maximum latitude.
streams.region.latmax = 90.0
# Fixed depths for relocation. Any depth can be used during relocation. These
# are only shortcuts.
fixedDepths = 0, 10, 18

12
etc/defaults/scsohlog.cfg Normal file
View File

@ -0,0 +1,12 @@
# Defines the output file to be created at every timeout
monitor.output.file = @LOGDIR@/server.xml
# Defines the timeout interval in seconds. Every N seconds
# the XML output file is generated.
monitor.output.interval = 60
# Defines an output script which is called after the output
# file is generated to trigger file processing. The execution
# of the script is blocking the application and thus the
# script should not spend too much time with its operations.
#monitor.output.script = ""

24
etc/defaults/scwfas.cfg Normal file
View File

@ -0,0 +1,24 @@
# Defines an alternative SDS archive handler. This is the name of an
# RecordStream interface that can be loaded via a plugin.
# If not given an internal implementation will be used.
handlerSDS = ""
# Defines the filebase of the SDS archive. If an alternative archive handler
# is defined this value serves as input to setSource().
filebase = @ROOTDIR@/var/lib/archive
# Defines the server port for Arclink connections. By default the Arclink
# standard port 18001 will be used.
arclink.port = -1
# Defines the server port for FDSNWS connections. By default
# port 18082 will be used.
fdsnws.port = 8080
# Defines the base URL of the FDSN webservice that is
# given in the WADL document.
fdsnws.baseURL = http://localhost:8080/fdsnws
# Defines the aggregated maximum time window (seconds) for all requested
# streams. A value of 0 will deactivate any restriction.
fdsnws.maxTimeWindow = 0

251
etc/defaults/scwfparam.cfg Normal file
View File

@ -0,0 +1,251 @@
# default subscriptions groups
connection.subscriptions = PICK, AMPLITUDE, MAGNITUDE, LOCATION, EVENT
# The path to the processing info logfile.
wfparam.logfile = @LOGDIR@/scwfparam-processing-info.log
# Defines the white- and blacklist of data streams to be used.
# The rules to decide if a stream is used or not are the following:
# 1. if whitelist is not empty and the stream is not on the whitelist,
# don't use it, ok otherwise
# 2. if blacklist is not empty and the stream is on the blacklist,
# don't use it, ok otherwise
# Both checks are made and combined with AND.
# Either whitelist or blacklist contains a list of patterns (wildcard allowed
# as * and ?), eg GE.*.*.*, *, GE.MORC.*.BH?
# Each stream id (NET.STA.LOC.CHA) will be checked against the defined patterns
wfparam.streams.whitelist = ""
wfparam.streams.blacklist = ""
# Default value of total time window length in seconds
# if wfparam.magnitudeTimeWindowTable is not specified.
# This times window includes wfparam.preEventWindowLength.
wfparam.totalTimeWindowLength = 360
# Magnitude dependent time window table. The format is
# "mag1:secs1, mag2:secs2, mag3:secs3".
# If a magnitude falls between two configured magnitudes the time window
# of the lower magnitude is then used. No interpolation takes place.
# Magnitude outside the configured range are clipped to the lowest/highest
# value.
#wfparam.magnitudeTimeWindowTable = ""
# The pre event time window length in seconds.
wfparam.preEventWindowLength = 60
# Analogue to wfparam.magnitudeTimeWindowTable but instead giving a time
# window, the distance in km is specified.
#wfparam.magnitudeDistanceTable = ""
# The maximum epicentral distance in km of a station being considered for
# processing. This value is used if wfparam.magnitudeDistanceTable is not
# specified.
wfparam.maximumEpicentralDistance = 400
# Relative saturation threshold in percent. If the absolute raw amplitude
# exceeds X% of 2**23 counts the station will be excluded from processing.
wfparam.saturationThreshold = 80
# Specifies the STA length in seconds of the applied STA/LTA check.
wfparam.STAlength = 1
# Specifies the LTA length in seconds of the applied STA/LTA check.
wfparam.LTAlength = 60
# Specifies the minimum STALTA ratio to be reached to further process a station.
wfparam.STALTAratio = 3
# Specifies the number of seconds around P to be used to check the STA/LTA ratio
wfparam.STALTAmargin = 5
# Defines the factor applied to the significant duration to define the
# processing spectra time window. If that value is <= 0 the totalTimeWindowLength
# is used.
wfparam.durationScale = 1.5
# Specifies a list of damping values for computation of the relative
# displacement elastic response spectrum in percent.
wfparam.dampings = 5
# Specifies the number of natural periods for computation of the relative
# displacement elastic response spectrum between Tmin and Tmax.
wfparam.naturalPeriods = 100
# Defines if a linear spacing or logarithmic spacing between Tmin and Tmax
# is used. The default is a linear spacing. The logarithmic spacing will fail
# if either Tmin or Tmax is 0.
wfparam.naturalPeriods.log = false
# Specifies the minimum period (Tmin) in seconds for computation of the
# relative displacement elastic response spectrum.
wfparam.Tmin = 0
# Specifies the maximum period (Tmax) in seconds for computation of the
# relative displacement elastic response spectrum.
wfparam.Tmax = 5
# Enables/disables after shock removal.
wfparam.afterShockRemoval = true
# Enables/disables pre-event cut-off. A hardcoded sta/lta algorithm
# (with sta=0.1s, lta=2s, sta/lta threshold=1.2) is run on the time window
# defined by (expected_P_arrival_time - 15 s). The pre-event window is hence
# defined as [t(sta/lta =1.2) - 15.5 s, t(sta/lta =1.2) - 0.5 s].
wfparam.eventCutOff = 1
# Specifies the filter order of the general filter.
wfparam.filter.order = 4
# Magnitude dependent filter table. The format is
# "mag1:fmin1;fmax1, mag2:fmin2;fmax2, mag3:fmin3;fmax3".
# If a magnitude falls between two configured magnitudes the filter
# of the lower magnitude is then used. No interpolation takes place.
# Magnitude outside the configured range are clipped to the lowest/highest
# value.
# Frequency values are given as simple positive doubles (Hz is assumed) or
# with suffix "fNyquist" which is then multiplied by the Nyquist frequency
# of the data to get the final corner frequency.
wfparam.magnitudeFilterTable = 0:0.2;0.8fNyquist,\
3:0.1;0.8fNyquist,\
5:0.05;0.8fNyquist,\
7:0.025;0.8fNyquist
# Specifies the frequency of the general hi-pass filter. If this parameter is
# equal to 0 the hi-pass filter is not used. If suffix "fNyquist" is used
# then the value is multiplied by the Nyquist frequency of the data to get
# the final corner frequency of the filter.
wfparam.filter.loFreq = 0.025
# Specifies the frequency of the general lo-pass filter. If this parameter
# is equal to 0 the lo-pass filter is not used. If suffix "fNyquist" is
# used then the value is multiplied by the Nyquist frequency of the data to
# get the final corner frequency of the filter.
wfparam.filter.hiFreq = 40
# Specifies the filter order of the post-deconvolution filter.
wfparam.pd.order = 4
# Specifies the frequency of the post-deconvolution hi-pass filter.
# If this parameter is equal to 0 the hi-pass filter is not used. If
# suffix "fNyquist" is used then the value is multiplied by the Nyquist
# frequency of the data to get the final corner frequency of the filter.
wfparam.pd.loFreq = 0
# Specifies the frequency of the post-deconvolution lo-pass filter.
# If this parameter is equal to 0, the lo-pass filter is disabled.
# If suffix "fNyquist" is used then the value is multiplied by the Nyquist
# frequency of the data to get the final corner frequency of the filter.
wfparam.pd.hiFreq = 0
# Enables/disables deconvolution and thus the usage of wfparam.pd.order,
# wfparam.pd.loFreq and wfparam.pd.hiFreq. If a channel does not provide full
# response information it is not used for processing.
wfparam.deconvolution = true
# Specifies the interval in seconds to check/start scheduled operations.
wfparam.cron.wakeupInterval = 10
# Specifies the maximum allowed idle time of a process before removed.
# The idle time is calculated if no further processing is scheduled and
# computes as: [now]-lastRun.
wfparam.cron.eventMaxIdleTime = 3600
# Enables/disables updating of a cron log file. This file will be created under
# ~/.seiscomp/log/[appname].sched
# and contains information about the scheduled events and the processing queue.
# The file is updated each n seconds, where n = wfparam.cron.wakeupInterval.
wfparam.cron.logging = true
# Specifies the delay in seconds to delay processing if a new authoritative
# origin arrives for an event.
wfparam.cron.updateDelay = 60
# Specifies a list of delay times in seconds relative to event time to trigger
# the processing. When the first origin of an event arrives this list is used
# to construct the crontab for this event.
wfparam.cron.delayTimes = ""
# Specifies the initial acquisition timeout. If the acquisition source
# (eg Arclink) does not respond within this threshold with waveforms,
# the request is discarded.
wfparam.acquisition.initialTimeout = 30
# Specifies the acquisition timeout when waveforms are being transferred.
# If no new waveforms arrive within this threshold, the request is aborted.
# This is important if a Seedlink connection is configured which can block
# the application for a very long time if at least one requested channel has
# no data. Seedlink does not finish the request until all data has been sent.
# When data will arrive for a particular channel is not known.
wfparam.acquisition.runningTimeout = 2
# Enables generation of short output event id's.
wfparam.output.shortEventID = false
# Enables/disables the output of processed waveforms.
wfparam.output.waveforms.enable = false
# Specifies the waveform output path. This parameter is only used if
# wfparam.output.waveforms.enable is true.
wfparam.output.waveforms.path = @LOGDIR@/shakemaps/waveforms
# Enables/disables the creation of an event directory (named with eventID) when
# storing the processed waveforms. This parameter is only used if
# wfparam.output.waveforms.enable is true.
wfparam.output.waveforms.withEventDirectory = false
# Enables/disables the output of spectra (psa, drs). The output format is a
# simple ascii file where the first column is the period and the second column
# the corresponding value.
wfparam.output.spectra.enable = false
# Specifies the spectra output path. This parameter is only used if
# wfparam.output.spectra.enable is true.
wfparam.output.spectra.path = @LOGDIR@/shakemaps/spectra
# Enables/disables the creation of an event directory (named with eventID)
# when storing the spectra. This parameter is only used if
# wfparam.output.spectra.enable is true.
wfparam.output.spectra.withEventDirectory = false
# Enables/disables ShakeMap XML output.
wfparam.output.shakeMap.enable = true
# Specifies the ShakeMap XML output path. This is only used if
# wfparam.output.shakeMap.enable is set to true.
wfparam.output.shakeMap.path = @LOGDIR@/shakemaps
# Specifies a script that is called whenever a new ShakeMap XML is available.
# The script is called with 5 parameters:
# * EventID
# * modified EventID for ShakeMap output
# * path to event directory (where input/event.xml and input/event_dat.xml lives)
# No files are deleted by the application. The ownership goes to the
# called script.
#wfparam.output.shakeMap.script = ...
# Enables/disables synchronous or asynchronous script calls. If enabled,
# be careful not to spend too much time in the script. The application is
# blocked while the script is running.
wfparam.output.shakeMap.synchronous = true
# If enabled the maximum PGV, PGA, PSA03, PSA10 and PSA30 of both horizontal
# components is used in the final output. Otherwise each component is saved.
wfparam.output.shakeMap.maximumOfHorizontals = false
# The XML encoding string written to the Shakemap XML file.
wfparam.output.shakeMap.encoding = "UTF-8"
# The target version of the Shakemap input files.
wfparam.output.shakeMap.version = 3
# Enables messaging output which creates objects of the StrongMotionParameters
# data model extension (defined by SED) and sends them to scmaster. In order to
# save the objects to the database, scmaster needs to load the dmsm plugin and
# the corresponding database schema must be applied.
# The default message group is AMPLITUDE. To change this group redefine
# connection.primaryGroup.
wfparam.output.messaging = false
# Defines the magnitude tolerance to completely reprocess an event with respect
# to the last state.
wfparam.magnitudeTolerance = 0.5

114
etc/defaults/seedlink.cfg Normal file
View File

@ -0,0 +1,114 @@
# Default network code. Used when a network code is omitted by a client
# in STATION request. Should be set to the network code of the majority
# of configured stations. 1 or 2 characters long, uppercase.
network = XX
# Defines the Seedlink port to accept requests.
port = 18000
# Path to the base directory of SeedLink data files (disk buffer).
filebase = @ROOTDIR@/var/lib/seedlink/buffer
# List of trusted addresses.
trusted = 127.0.0.0/8
# List of IP addresses or IP/mask pairs (in ipchains/iptables syntax)
# that can access stations. Per station access definitions
# supersede this parameter. By default any client can access
# all stations.
access = 0.0.0.0/0
# Check start and end times of streams.
stream_check = true
# If stream_check = enabled, also check for gaps in all channels that
# match given pattern. Register all gaps that are larger than +-0.5 seconds.
# gap_check_pattern = [EBLV][HLNG][ZNE]|S[NG][ZNE].
# Disabled to save memory.
gap_check_pattern = XXXXX
# Time difference between records (microseconds) above which a gap is declared.
gap_treshold = 500000
# Can be enabled or disabled. Required for slinktool option -tw.
window_extraction = true
# Same as window_extraction for trusted IP addresses.
window_extraction_trusted = true
# Allow websocket connections.
websocket = false
# Same as websocket for trusted IP addresses.
websocket_trusted = false
# If activated Seedlink uses the mseedfifo to read records and
# only the mseedfifo_plugin is started. This command is useful
# to play back historic data, e.g. with msrtsimul.
msrtsimul = false
# Size of memory buffer (number of recent Mini-SEED records kept in RAM).
buffers = 100
# Number of disk buffer segments (files under <dir>/station/segments/
# where <dir> is the directory given by the filebase parameter).
segments = 50
# Size of one disk buffer segment in the records (512-byte units).
segsize = 1000
# Number of blank records to insert after the re-scan of disk buffer
# if <dir>/station/buffer.xml is not found (assuming the server
# did not terminate correctly).
blanks = 10
# Encoding of Mini-SEED records created by SeedLink. The value must be
# steim1 or steim2. If omitted, the global encoding parameter is used.
encoding = steim2
# INFO provided to arbitrary Internet hosts: ID, CAPABILITIES, STATIONS,
# STREAMS
info = streams
# INFO provided to trusted hosts: ID, CAPABILITIES, STATIONS, STREAMS,
# GAPS, CONNECTIONS, ALL
info_trusted = all
# Show requests in log file
request_log = true
# Give warning if an input channel has time gap larger than 10 us
proc_gap_warn = 10
# Flush streams if an input channel has time gap larger than 0.1 s
proc_gap_flush = 100000
# Reset FIR filters if an input channel has time gap larger than 1 s
proc_gap_reset = 1000000
# Enable backfilling buffer for out-of-order records when raw samples
# are transmitted. This values defines its capacity in seconds.
backfill_buffer = 0
# Maximum allowed deviation from the sequence number of oldest packet if
# packet with requested sequence number is not found. If seq_gap_limit is
# exceeded, data flow starts from the next packet coming in, otherwise
# from the oldest packet in buffer.
# Use the following to always start with the oldest packet:
# seq_gap_limit = 16777216
seq_gap_limit = 100000
# Total number of TCP/IP connections allowed
connections = 500
# Maximum number of TCP/IP connections per IP
connections_per_ip = 20
# Maximum speed per connection (0: throttle disabled)
bytespersec = 0
# Define a database read connection to be used for Seedlink station descriptions.
# If no database is configured (which is the default) then the station code will be used.
# If a remote host is specified, ensure that its database server is reachable from this computer.
inventory_connection = ""

View File

@ -0,0 +1,72 @@
# Host of the Seedlink server to connect to. If the acquisition
# is running on one system nothing needs to be changed.
address = 127.0.0.1
# The port of the Seedlink server to connect to. If the acquisition
# is running on one system this port must match the configured
# local Seedlink port.
port = 18000
# Path to waveform archive where all data is stored. Relative paths
# (as the default) are treated relative to the installation
# directory ($SEISCOMP_ROOT).
archive = var/lib/archive
# Number of records (512 byte units) to buffer before flushing to
# disk.
buffer = 1000
# The network reconnect delay (in seconds) for the connection
# to the SeedLink server. If the connection breaks for any
# reason this will govern how soon a reconnection should be
# attempted. The default value is 30 seconds.
delay = 30
# The network timeout (in seconds) for the connection to the
# SeedLink server. If no data [or keep alive packets?] are received
# in this time range the connection is closed and re-established
# (after the reconnect delay has expired). The default value is
# 600 seconds. A value of 0 disables the timeout.
networkTimeout = 900
# Timeout for closing idle data stream files in seconds. The idle
# time of the data streams is only checked when some packets have
# arrived. If no packets arrived no idle stream files will be
# closed. There is no reason to change this parameter except for
# the unusual cases where the process is running against an open
# file number limit. Default is 300 seconds.
idleTimeout = 300
# Interval (in seconds) at which keepalive (heartbeat) packets
# are sent to the server. Keepalive packets are only sent if
# nothing is received within the interval. This requires a
# Seedlink version >= 3.
keepalive = 0
# Path to certificate store where all certificates and CRLs are stored. Relative
# paths (as the default) are treated relative to the installation directory
# ($SEISCOMP_ROOT). If the signature check is enabled slarchive loads all files
# at start. The store uses the OpenSSL store format. From the official OpenSSL
# documentation: "The directory should contain one certificate or CRL per file
# in PEM format, with a file name of the form hash.N for a certificate, or
# hash.rN for a CRL. The .N or .rN suffix is a sequence number that starts at
# zero, and is incremented consecutively for each certificate or CRL with the
# same hash value. Gaps in the sequence numbers are not supported, it is
# assumed that there are no more objects with the same hash beyond the first
# missing number in the sequence." The hash value can be obtained as follows:
# openssl x509 -hash -noout -in <file>
validation.certs = var/lib/certs
# Signatures are expected to be carried in blockette 2000 as opaque data.
# Modes:
# ignore : Signatures will be ignored and no further actions will be taken.
# warning: Signatures will be checked and all received records which do not
# carry a valid signature or no signature at all will be logged at warning
# level.
# skip   : All received records without a valid signature will be ignored
# and will not be processed.
validation.mode = ignore