Skip to content

Commit c822f57

Browse files
Merge pull request #513 from eqcorrscan/develop
Release 0.4.4 Merge
2 parents 6286e4f + d917b3a commit c822f57

29 files changed

+519
-231
lines changed

.github/test_conda_env.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ channels:
55
dependencies:
66
- numpy>=1.12
77
- matplotlib>=1.3.0
8-
- scipy>=0.18
8+
- scipy>=0.18,<1.9.0 # Pinned due to scipy/obspy hanning renaming
99
- mock
1010
- obspy>=1.3.0
1111
- h5py
@@ -17,7 +17,7 @@ dependencies:
1717
- pytest-pep8
1818
- pytest-xdist
1919
- pytest-rerunfailures
20-
- pytest-mpl
20+
- pytest-mpl<0.16.0
2121
- codecov
2222
- pip
2323
- pip:

.github/test_conda_env_macOS.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ dependencies:
1414
- llvm-openmp>=4.0.1
1515
- numpy>=1.12
1616
- matplotlib>=1.3.0
17-
- scipy>=0.18
17+
- scipy>=0.18,<1.9.0 # Pinned due to scipy/obspy hanning renaming
1818
- mock
1919
- obspy>=1.3.0
2020
- h5py<3.2 # Issue with dep resolution: https://github.com/conda-forge/h5py-feedstock/issues/92
@@ -26,7 +26,7 @@ dependencies:
2626
- pytest-pep8
2727
- pytest-xdist
2828
- pytest-rerunfailures
29-
- pytest-mpl
29+
- pytest-mpl<0.16.0
3030
- codecov
3131
- pip
3232
- pip:

.github/workflows/runtest.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,7 @@ jobs:
6565
- name: run main test suite
6666
shell: bash -l {0}
6767
run: |
68-
py.test -n 2 -m "not serial and not network and not superslow" --cov-report=xml
68+
py.test -n 2 -m "not serial and not network and not superslow" --cov-report=xml --dist loadscope
6969
7070
- name: run serial test
7171
if: always()

CHANGES.md

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,17 @@
11
## Current
2+
* core.match_filter
3+
- Bug-fix: peak-cores could be defined twice in _group_detect through kwargs.
4+
Fix: only update peak_cores if it isn't there already.
25
* core.match_filter.tribe
36
- Detect now allows passing of pre-processed data
7+
* core.match_filter.template
8+
- Remove duplicate detections from overlapping windows using `._uniq()`
9+
* core.lag_calc._xcorr_interp
10+
- CC-interpolation replaced with resampling (more robust), old method
11+
deprecated. Use the new method by passing use_new_resamp_method=True as a keyword argument.
12+
* core.lag_calc:
13+
- Fixed bug where minimum CC defined via min_cc_from_mean_cc_factor was not
14+
set correctly for negative correlation sums.
415
* utils.correlate
516
- Fast Matched Filter now supported natively for version >= 1.4.0
617
- Only full correlation stacks are returned now (e.g. where fewer than
@@ -21,6 +32,13 @@
2132
the old parallelization strategy across traces.
2233
- Now includes `all_horiz`-option that will correlate all matching horizontal
2334
channels no matter to which of these the S-pick is linking.
35+
* utils.clustering
36+
- Allow handling of indirect comparison of event-waveforms (i.e., events
37+
without matching traces which can be compared indirectly via a third event)
38+
- Allows setting the clustering method, metric, and sort_order from
39+
scipy.cluster.hierarchy.linkage.
40+
* tribe, template, template_gen, archive_read, clustering: remove option to read
41+
from seishub (deprecated in obspy).
2442

2543
## 0.4.3
2644
* core.match_filter

CONTRIBUTORS.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,3 +8,4 @@
88
* Felix Halpaap
99
* Iman Kahbasi
1010
* eQ Halauwet
11+
* Glenn Nelson

eqcorrscan/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@
2525

2626
__all__ = ['core', 'utils', 'tutorials', 'tests']
2727

28-
__version__ = '0.4.3'
28+
__version__ = '0.4.4'
2929

3030
# Cope with changes to name-space to remove most of the camel-case
3131
_import_map = {}

eqcorrscan/core/lag_calc.py

Lines changed: 44 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
from eqcorrscan.core.match_filter.template import Template
2626
from eqcorrscan.utils.plotting import plot_repicked
2727

28+
show_interp_deprec_warning = True
2829

2930
Logger = logging.getLogger(__name__)
3031

@@ -43,14 +44,19 @@ def __str__(self):
4344
return 'LagCalcError: ' + self.value
4445

4546

46-
def _xcorr_interp(ccc, dt):
47+
def _xcorr_interp(ccc, dt, resample_factor=10, use_new_resamp_method=False,
48+
**kwargs):
4749
"""
48-
Interpolate around the maximum correlation value for sub-sample precision.
50+
Resample correlation-trace and check if there is a better CCC peak for
51+
sub-sample precision.
4952
5053
:param ccc: Cross-correlation array
5154
:type ccc: numpy.ndarray
5255
:param dt: sample interval
5356
:type dt: float
57+
:param resample_factor:
58+
Factor for upsampling CC-values (only for use_new_resamp_method=True)
59+
:type resample_factor: int
5460
5561
:return: Position of interpolated maximum in seconds from start of ccc
5662
:rtype: float
@@ -59,6 +65,32 @@ def _xcorr_interp(ccc, dt):
5965
cc = ccc[0]
6066
else:
6167
cc = ccc
68+
69+
# New method with resampling - make this the default in a future version
70+
if use_new_resamp_method:
71+
cc_resampled = scipy.signal.resample(cc, len(cc) * resample_factor + 1)
72+
dt_resampled = dt / resample_factor
73+
cc_t = np.arange(0, len(cc_resampled) * dt_resampled, dt_resampled)
74+
peak_index = cc_resampled.argmax()
75+
cc_peak = max(cc_resampled)
76+
77+
shift = cc_t[peak_index]
78+
if (cc_peak < np.amax(cc) or cc_peak > 1.0 or
79+
not 0 < shift < len(ccc) * dt):
80+
# Sometimes the interpolation returns a worse result.
81+
Logger.warning("Interpolation did not give an accurate result, "
82+
"returning maximum in data")
83+
return np.argmax(ccc) * dt, np.amax(ccc)
84+
return shift, cc_peak
85+
86+
# Otherwise use old interpolation method, but warn with deprecation message
87+
# (but show it only once):
88+
global show_interp_deprec_warning
89+
if show_interp_deprec_warning:
90+
Logger.warning(
91+
'This method for interpolating cross-correlations is deprecated, '
92+
'use a more robust method with use_new_resamp_method=True')
93+
show_interp_deprec_warning = False
6294
# Code borrowed from obspy.signal.cross_correlation.xcorr_pick_correction
6395
cc_curvature = np.concatenate((np.zeros(1), np.diff(cc, 2), np.zeros(1)))
6496
cc_t = np.arange(0, len(cc) * dt, dt)
@@ -191,7 +223,8 @@ def xcorr_pick_family(family, stream, shift_len=0.2, min_cc=0.4,
191223
min_cc_from_mean_cc_factor=None,
192224
horizontal_chans=['E', 'N', '1', '2'],
193225
vertical_chans=['Z'], cores=1, interpolate=False,
194-
plot=False, plotdir=None, export_cc=False, cc_dir=None):
226+
plot=False, plotdir=None, export_cc=False, cc_dir=None,
227+
**kwargs):
195228
"""
196229
Compute cross-correlation picks for detections in a family.
197230
@@ -273,8 +306,9 @@ def xcorr_pick_family(family, stream, shift_len=0.2, min_cc=0.4,
273306
checksum, cccsum, used_chans = 0.0, 0.0, 0
274307
event = Event()
275308
if min_cc_from_mean_cc_factor is not None:
276-
cc_thresh = min(detection.detect_val / detection.no_chans
277-
* min_cc_from_mean_cc_factor, min_cc)
309+
cc_thresh = min(abs(detection.detect_val / detection.no_chans
310+
* min_cc_from_mean_cc_factor),
311+
min_cc)
278312
Logger.info('Setting minimum cc-threshold for detection %s to %s',
279313
detection.id, str(cc_thresh))
280314
else:
@@ -285,7 +319,7 @@ def xcorr_pick_family(family, stream, shift_len=0.2, min_cc=0.4,
285319
tr = detect_stream.select(
286320
station=stachan.channel[0], channel=stachan.channel[1])[0]
287321
if interpolate:
288-
shift, cc_max = _xcorr_interp(correlation, dt=delta)
322+
shift, cc_max = _xcorr_interp(correlation, dt=delta, **kwargs)
289323
else:
290324
cc_max = np.amax(correlation)
291325
shift = np.argmax(correlation) * delta
@@ -387,7 +421,7 @@ def _prepare_data(family, detect_data, shift_len):
387421
length = round(length_samples) / family.template.samp_rate
388422
Logger.info("Setting length to {0}s to give an integer number of "
389423
"samples".format(length))
390-
prepick = shift_len
424+
prepick = shift_len + family.template.prepick
391425
detect_streams_dict = family.extract_streams(
392426
stream=detect_data, length=length, prepick=prepick)
393427
for key, detect_stream in detect_streams_dict.items():
@@ -419,7 +453,7 @@ def lag_calc(detections, detect_data, template_names, templates,
419453
shift_len=0.2, min_cc=0.4, min_cc_from_mean_cc_factor=None,
420454
horizontal_chans=['E', 'N', '1', '2'],
421455
vertical_chans=['Z'], cores=1, interpolate=False,
422-
plot=False, plotdir=None, export_cc=False, cc_dir=None):
456+
plot=False, plotdir=None, export_cc=False, cc_dir=None, **kwargs):
423457
"""
424458
Cross-correlation derived picking of seismic events.
425459
@@ -557,7 +591,7 @@ def lag_calc(detections, detect_data, template_names, templates,
557591
detections=template_detections,
558592
template=Template(
559593
name=template_name, st=template,
560-
samp_rate=template[0].stats.sampling_rate))
594+
samp_rate=template[0].stats.sampling_rate, prepick=0.0))
561595
# Make a sparse template
562596
if len(template_detections) > 0:
563597
template_dict = xcorr_pick_family(
@@ -566,7 +600,7 @@ def lag_calc(detections, detect_data, template_names, templates,
566600
horizontal_chans=horizontal_chans,
567601
vertical_chans=vertical_chans, interpolate=interpolate,
568602
cores=cores, shift_len=shift_len, plot=plot, plotdir=plotdir,
569-
export_cc=export_cc, cc_dir=cc_dir)
603+
export_cc=export_cc, cc_dir=cc_dir, **kwargs)
570604
initial_cat.update(template_dict)
571605
# Order the catalogue to match the input
572606
output_cat = Catalog()

eqcorrscan/core/match_filter/detection.py

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -152,13 +152,6 @@ def __gt__(self, other):
152152
def __ge__(self, other):
153153
return not self.__lt__(other)
154154

155-
def __hash__(self):
156-
"""
157-
Cannot hash Detection objects, they may change.
158-
:return: 0
159-
"""
160-
return 0
161-
162155
def __ne__(self, other):
163156
return not self.__eq__(other)
164157

eqcorrscan/core/match_filter/family.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -304,7 +304,8 @@ def _uniq(self):
304304
305305
.. rubric:: Example
306306
307-
>>> from eqcorrscan import Template, Detection
307+
>>> from eqcorrscan import Template, Detection, Family
308+
>>> from obspy import UTCDateTime
308309
>>> family = Family(
309310
... template=Template(name='a'), detections=[
310311
... Detection(template_name='a', detect_time=UTCDateTime(0),
@@ -618,7 +619,7 @@ def lag_calc(self, stream, pre_processed, shift_len=0.2, min_cc=0.4,
618619
min_cc_from_mean_cc_factor=min_cc_from_mean_cc_factor,
619620
vertical_chans=vertical_chans, cores=cores,
620621
interpolate=interpolate, plot=plot, plotdir=plotdir,
621-
export_cc=export_cc, cc_dir=cc_dir)
622+
export_cc=export_cc, cc_dir=cc_dir, **kwargs)
622623
catalog_out = Catalog([ev for ev in picked_dict.values()])
623624
for detection_id, event in picked_dict.items():
624625
for pick in event.picks:

eqcorrscan/core/match_filter/matched_filter.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -201,6 +201,7 @@ def _group_detect(templates, stream, threshold, threshold_type, trig_int,
201201
n_groups += 1
202202
else:
203203
n_groups = 1
204+
kwargs.update({'peak_cores': kwargs.get('peak_cores', process_cores)})
204205
for st_chunk in streams:
205206
chunk_start, chunk_end = (min(tr.stats.starttime for tr in st_chunk),
206207
max(tr.stats.endtime for tr in st_chunk))
@@ -226,8 +227,7 @@ def _group_detect(templates, stream, threshold, threshold_type, trig_int,
226227
xcorr_func=xcorr_func, concurrency=concurrency,
227228
threshold=threshold, threshold_type=threshold_type,
228229
trig_int=trig_int, plot=plot, plotdir=plotdir, cores=cores,
229-
full_peaks=full_peaks, peak_cores=process_cores,
230-
**kwargs)
230+
full_peaks=full_peaks, **kwargs)
231231
for template in template_group:
232232
family = Family(template=template, detections=[])
233233
for detection in detections:

0 commit comments

Comments
 (0)