forked from taylor-kenyon/FluvialSeismology_Analysis
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathScreening_Tool.py
More file actions
161 lines (129 loc) · 6.86 KB
/
Screening_Tool.py
File metadata and controls
161 lines (129 loc) · 6.86 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
import obspy
from obspy import read, UTCDateTime
from obspy.io.xseed import Parser
from obspy.signal import PPSD
import numpy as np
import matplotlib.pyplot as plt
from obspy.imaging.cm import pqlx
import os
from obspy.clients.fdsn import Client
from obspy.clients import fdsn
from fractions import Fraction
import waveformUtils
import getData
import PPSD
import spectraPlay
import set_workdir
import importlib
# In case the local helper .py files have been edited since this interpreter
# started (common in Jupyter), force-refresh each one from disk.
for _helper_module in (waveformUtils, getData, PPSD, spectraPlay, set_workdir):
    importlib.reload(_helper_module)
## main function to process data
def _station_data_path(network, station, channel):
    """Return the local directory holding .mseed files for one station.

    Seismic ("ZE") and infrasound ("ZD") networks live under different
    directory trees; any other network code is rejected.

    Raises
    ------
    ValueError
        If ``network`` is neither "ZE" nor "ZD".
    """
    if network == "ZE":
        return f'C:/Users/zzawol/Documents/iris-data/seismic_data/NO{station}/{channel}'
    if network == "ZD":
        return f'C:/Users/zzawol/Documents/iris-data/infrasound_data/{station}'
    raise ValueError(f"Unknown network code: {network}")
def _parse_user_freqs(user_inputs):
    """Convert raw comma-split input strings into a list of frequencies (Hz).

    Accepts decimals ("0.5") and fractions ("1/100").  Invalid entries and
    non-positive values are skipped with a message -- a 0 Hz (or "1/0")
    entry would otherwise crash the later period conversion with
    ZeroDivisionError.  Falls back to [120, 70, 20] Hz when nothing valid
    was entered.
    """
    freqs = []
    for raw in user_inputs:
        freq = raw.strip()  # clean up extra spaces
        try:
            value = float(Fraction(freq)) if '/' in freq else float(freq)
        except (ValueError, ZeroDivisionError):
            print(f"Invalid input: {freq} Using default values.")
            continue
        if value <= 0:
            # zero/negative frequencies have no valid period (1/f)
            print(f"Invalid input: {freq} Using default values.")
            continue
        freqs.append(value)
    # if user didn't provide any valid values, set defaults
    if not freqs:
        print("No valid input detected. Using default frequency values.")
        freqs = [120, 70, 20]  # default frequencies in Hz
    return freqs
def process_data(start_date, end_date, delta, stas):
    """Main function to run through each day of data in timeframe and perform analyses.

    For each window of ``delta`` seconds the function:
      1. ensures raw miniSEED data are present locally (downloading via
         ``getData.get_iris_data`` when missing),
      2. plots a spectrogram/RSAM figure (``spectraPlay.plot_spectrogram``)
         using the window plus 6 extra minutes,
      3. prompts the user for frequencies of interest, and
      4. plots PPSD and temporal figures via ``PPSD.plot_ppsd`` (the local
         PPSD module -- note it shadows obspy.signal.PPSD at file level).

    Parameters
    ----------
    start_date, end_date : obspy.UTCDateTime
        Start (inclusive) / end (exclusive) of the analysis time frame.
    delta : int or float
        Window length in seconds (86400 for daily processing).
    stas : list of str
        SEED ids ("NET.STA.LOC.CHA"); only ``stas[0]`` drives the local
        file layout here, but the full list is forwarded to the helpers.

    Raises
    ------
    ValueError
        If the network code of ``stas[0]`` is neither "ZE" nor "ZD".
    """
    # set station info
    nowsta = stas[0]
    network, station, location, channel = nowsta.split('.')
    print(f"Station: {nowsta}")
    # define path where .mseed files are stored
    path = _station_data_path(network, station, channel)
    # check if the path exists, create if not
    if not os.path.exists(path):
        print(f"Path '{path}' does not exist. Creating directory...")
        os.makedirs(path, exist_ok=True)
    else:
        print(f"Path '{path}' already exists.")
    file_pattern = f'{path}/{network}.{station}.{location}.{channel}*.mseed'
    # loop to make RSAM, spectra, PPSD, and temporal plots for each day in time frame
    current_date = start_date
    while current_date < end_date:
        t1 = current_date
        t2 = current_date + delta  # the next day
        print(f"\nProcessing data for {t1} to {t2}")
        try:
            # set working directory to where getData.py is located - ensures checks for pre-downloaded data
            set_workdir.set_workdir('getData.py')
            # uses get_iris_data to ensure data for this time range is downloaded
            print(f"Checking if any missing data for {t1} to {t2}, downloading if necessary.")
            getData.get_iris_data(t1, t2, stas, path)  # downloads any missing data if necessary
            # reading local data for current date range
            S = obspy.read(file_pattern, starttime=t1, endtime=t2)  # obspy read
            S1 = S.copy()  # copy for spectra, so the PPSD input (S) stays untouched
            # read an extra 6 mins of data to extend the stream
            spec_end = t2 + (6 * 60)  # 6 minutes beyond original end time
            try:
                set_workdir.set_workdir('getData.py')
                print(f"Checking if any missing data for {t2} to {spec_end}, downloading if necessary.")
                getData.get_iris_data(t2, spec_end, stas, path)  # downloads any missing data if necessary
                # after calling get_iris_data, re-read local data (after ensuring it's downloaded)
                S_ext = obspy.read(file_pattern, starttime=t2, endtime=spec_end)  # read only the extra data
                # normalize sampling rate before merging (350 Hz seismic, 100 Hz infrasound)
                if network == "ZE" and S_ext[0].stats.sampling_rate != 350:  # extra check
                    S_ext.resample(350)
                elif network == "ZD" and S_ext[0].stats.sampling_rate != 100:  # extra check
                    S_ext.resample(100)
                # add extra 6 mins of data to copied stream
                S1.extend(S_ext)  # now S1 contains the original data + 6 minutes, and S is untouched
                S1.merge(method=1)  # avoid duplicates
            except Exception as e:
                # best effort: the spectrogram still runs on the un-extended stream
                print(f"An error occurred while processing data from {t2} to {spec_end}: {e}")
            print("Making spectrogram")
            # plot spectrogram and RSAM
            set_workdir.set_workdir('spectraPlay.py')
            f1 = spectraPlay.plot_spectrogram(S1, t1, spec_end, stas)  # pass S1 to plot_spectrogram function
            # ask user for frequency values after spectrogram
            print("Enter frequency values for temporal plot (comma-separated, takes in fractions and decimals):")
            user_inputs = input("Example (0.5, 50, 1/100) : ").split(',')
            # process user input - converts fractions and regular floats
            user_freqs = _parse_user_freqs(user_inputs)
            user_periods = [1 / freq for freq in user_freqs]  # converts to periods for temporal plot
            # plot PPSD
            print("Making PPSD and Temporal Plots")
            set_workdir.set_workdir('PPSD.py')
            PPSD.plot_ppsd(S, t1, t2, stas, user_periods)
            round_periods = ['%.4f' % per for per in user_periods]
            print(f"Inputted frequencies: {user_freqs} --> periods: {round_periods}")  # for checking
        except Exception as e:
            print(f"An error occurred while processing data from {t1} to {t2}: {e}")
        current_date += delta  # move to next day
# to run independently via .py file
if __name__ == "__main__":
    # setting up parameters
    # define start and end time for the data range
    start_date = UTCDateTime(2023, 8, 17)  # start time (inclusive)
    end_date = UTCDateTime(2023, 8, 18)    # end time (exclusive)
    delta = 86400  # 1 day in seconds
    # Full deployment inventory, kept for reference only.  (It was
    # previously assigned to `stas` and then immediately overwritten,
    # so it never affected the run.)
    # seismic:    2301 .. 2316
    # infrasound: 'G2301' .. 'G2316'
    stas = ['ZE.2306..GPZ']  # station IDs
    #stas = ['ZD.G2312.01.HDF'] # GEM station IDs
    # call function
    process_data(start_date, end_date, delta, stas)
    print(f"Analysis for {stas} from {start_date} to {end_date} complete.")
# run in jupyter by: %run Screening_Tool.py
# or run in terminal by: python Screening_Tool.py