-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathveriPower.py
executable file
·313 lines (286 loc) · 10.8 KB
/
veriPower.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
#!/usr/bin/env python
###################################################################################################
#
#veriPower.py
#
#This program compares DQA power levels to those computed by obspy
#
#We use the following functions
#
#getstations()
#querydatabase()
#computePSD()
#getPAZ2()
#runmulti()
#
###################################################################################################
import urllib2
import os
import psycopg2
import glob
import numpy
from obspy.core import read, UTCDateTime, Stream
from obspy.signal import pazToFreqResp
from obspy.signal.spectral_estimation import get_NLNM
from obspy.xseed import Parser
from time import gmtime, strftime
from matplotlib.mlab import psd
from math import pi
from multiprocessing import Pool
def getstations(sp,dateval):
#A function to get a station list along with channels and locations
stations = []
for cursta in sp.stations:
#As we scan through blockettes we need to find blockettes 50
for blkt in cursta:
if blkt.id == 50:
#Pull the station info for blockette 50
stacall = blkt.station_call_letters.strip()
if debug:
print 'Here is a station in the dataless: ' + stacall
if blkt.id == 52:
if blkt.channel_identifier[:2] == 'LH':
if blkt.start_date <= dateval:
if len(str(blkt.end_date)) == 0 or dateval <= blkt.end_date:
stations.append(' '.join([stacall, blkt.location_identifier, blkt.channel_identifier]))
return stations
def querydatabase(sta,chan,loc,dateval):
#Here is the database query
metric1 = 'NLNMDeviationMetric:4-8'
metric2 = 'NLNMDeviationMetric:18-22'
metric3 = 'NLNMDeviationMetric:90-110'
metric4 = 'NLNMDeviationMetric:200-500'
#Here we query the database
connString = open('db.config', 'r').readline()
if debug:
print 'Connecting to', connString.split(',')[0]
try:
host, user, pwd, db, port = connString.split(',')
conn = psycopg2.connect(host=host, user=user, password=pwd, database=db, port=port)
queryString = """
SELECT "tblGroup".name, tblStation.name, tblChannel.name, tblMetric.name, value, tblDate.date
FROM tblstation
JOIN tblSensor on fkStationID = pkStationID
JOIN tblChannel on fkSensorID = pksensorID
JOIN tblMetricData on fkChannelID = pkChannelID
JOIN tblMetric on fkMetricID = pkMetricID
JOIN "tblGroup" on fkNetworkID = pkGroupID
JOIN tblDate on tblMetricData.date = pkdateid
WHERE tblStation.name = %s
AND tblChannel.name = %s
AND tblMetric.name = %s
AND tblDate.date = %s
AND tblSensor.location = %s
LIMIT 100
"""
date = str(dateval.year) + "-" + str(dateval.month).zfill(2) + "-" + \
str(dateval.day).zfill(2)
cursor = conn.cursor()
cursor.execute(queryString,(sta,chan,metric1,date,loc))
psdvalue1 = round(float(cursor.fetchone()[4]),2)
cursor.execute(queryString,(sta,chan,metric2,date,loc))
psdvalue2 = round(float(cursor.fetchone()[4]),2)
cursor.execute(queryString,(sta,chan,metric3,date,loc))
psdvalue3 = round(float(cursor.fetchone()[4]),2)
cursor.execute(queryString,(sta,chan,metric4,date,loc))
psdvalue4 = round(float(cursor.fetchone()[4]),2)
if debug:
print 'Here is our spectra value1: ' + str(psdvalue1)
print 'Here is our spectra value2: ' + str(psdvalue2)
print 'Here is our spectra value3: ' + str(psdvalue3)
print 'Here is our spectra value4: ' + str(psdvalue4)
conn.close()
except:
psdvalue1 = 0
psdvalue2 = 0
psdvalue3 = 0
psdvalue4 = 0
return psdvalue1, psdvalue2, psdvalue3, psdvalue4
def computePSD(sp,net,sta,loc,chan,dateval):
    #Compute the PSD of one day of data, correct it by the instrument
    #response, and return its deviation from the NLNM averaged over the four
    #standard period bands: 4-8 s, 18-22 s, 90-110 s, and 200-500 s.
    #Returns four values rounded to two decimals; zeros on any failure.
    lenfft = 5000
    lenol = 2500
    #Period bands in seconds as (permin, permax) pairs, in output order
    bands = [(4, 8), (18, 22), (90, 110), (200, 500)]
    try:
        #Get the response and compute the amplitude response
        paz = getPAZ2(sp,net,sta,loc,chan,dateval)
        respval = pazToFreqResp(paz['poles'], paz['zeros'], paz['sensitivity']*paz['gain'], \
            t_samp=1, nfft=lenfft, freq=False)[1:]
        respval = numpy.absolute(respval*numpy.conjugate(respval))
        #Seed data lives on different volumes depending on the network
        if net in set(['IW','NE','US']):
            locStr = '/xs1'
        else:
            locStr = '/xs0'
        readDataString = locStr + '/seed/' + net + '_' + sta + '/' + str(dateval.year) + \
            '/' + str(dateval.year) + '_' + str(dateval.julday).zfill(3) + '_' + net + \
            '_' + sta + '/' + loc + '_' + chan + '*.seed'
        datafiles = glob.glob(readDataString)
        st = Stream()
        for datafile in datafiles:
            st += read(datafile)
        st.merge(method=-1)
        #Compute the PSD (1 sps LH data), convert to acceleration dB
        cpval, fre = psd(st[0].data, NFFT=lenfft, Fs=1, noverlap=lenol, scale_by_freq=True)
        per = 1/fre[1:]
        cpval = 10*numpy.log10(((2*pi*fre[1:])**2)*cpval[1:]/respval)
        perNLNM, NLNM = get_NLNM()
        results = []
        #Average each band of the PSD and subtract the matching NLNM average;
        #per decreases with index, so the permax index is the lower slice bound
        for permin, permax in bands:
            perminind = numpy.abs(per - permin).argmin()
            permaxind = numpy.abs(per - permax).argmin()
            perNLNMminind = numpy.abs(perNLNM - permin).argmin()
            perNLNMmaxind = numpy.abs(perNLNM - permax).argmin()
            results.append(round(numpy.average(cpval[permaxind:perminind]) - \
                numpy.average(NLNM[perNLNMmaxind:perNLNMminind]), 2))
    except Exception:
        #Best effort: missing data, bad response, or read error yields zeros
        results = [0, 0, 0, 0]
    return results[0], results[1], results[2], results[3]
def getPAZ2(sp,net,sta,loc,chan,dateval):
    #Scan the dataless blockettes for the poles, zeros, and gains of one
    #net/sta/loc/chan epoch covering dateval.  Returns a dict that may hold
    #'poles', 'zeros', 'gain' (A0 normalization), 'sensitivity',
    #'seismometer_gain', and 'digitizer_gain' keys, depending on which
    #blockettes were found.  station_flag/channel_flag carry matching state
    #from blockettes 50/52 forward to the 53/58 blockettes that follow them.
    debuggetPAZ2 = False
    data = {}
    station_flag = False
    channel_flag = False
    for statemp in sp.stations:
        for blockette in statemp:
            if blockette.id == 50:
                #Blockette 50: does this station record match net/sta?
                station_flag = False
                if net == blockette.network_code and sta == blockette.station_call_letters:
                    station_flag = True
                    if debuggetPAZ2:
                        print 'We found the station blockettes'
            elif blockette.id == 52 and station_flag:
                #Blockette 52: does this channel epoch match loc/chan and
                #cover dateval?
                channel_flag = False
                if blockette.location_identifier == loc and blockette.channel_identifier == chan:
                    if debuggetPAZ2:
                        print 'We are in the location and channel blockette'
                        print 'End date: ' + str(blockette.end_date)
                        print 'Start date: ' + str(blockette.start_date)
                    if type(blockette.end_date) is str:
                        #A string end date marks an open (current) epoch
                        #NOTE(review): curtime is computed but never used below
                        curdoy = strftime("%j",gmtime())
                        curyear = strftime("%Y",gmtime())
                        curtime = UTCDateTime(curyear + "-" + curdoy + "T00:00:00.0")
                        if blockette.start_date <= dateval:
                            channel_flag = True
                            if debuggetPAZ2:
                                print 'We found the channel blockette'
                    elif blockette.start_date <= dateval and blockette.end_date >= dateval:
                        channel_flag = True
                        if debuggetPAZ2:
                            print 'We found the channel blockette'
            elif blockette.id == 58 and channel_flag and station_flag:
                #Blockette 58: stage gains.  Stage 0 is the overall
                #sensitivity; stage 1 the seismometer; stages 2+ are folded
                #into the digitizer gain
                if blockette.stage_sequence_number == 0:
                    data['sensitivity'] = blockette.sensitivity_gain
                elif blockette.stage_sequence_number == 1:
                    data['seismometer_gain'] = blockette.sensitivity_gain
                elif blockette.stage_sequence_number == 2:
                    data['digitizer_gain'] = blockette.sensitivity_gain
                elif blockette.stage_sequence_number == 3:
                    if not 'digitizer_gain' in data.keys():
                        data['digitizer_gain'] = blockette.sensitivity_gain
                    else:
                        data['digitizer_gain'] *= blockette.sensitivity_gain
            elif blockette.id == 53 and channel_flag and station_flag:
                #Blockette 53: poles, zeros, and A0 normalization.  Multiple
                #stages multiply their A0 factors together
                if not 'gain' in data.keys():
                    data['gain'] = blockette.A0_normalization_factor
                else:
                    data['gain'] *= blockette.A0_normalization_factor
                if debuggetPAZ2:
                    print 'Here is the gain: ' + str(blockette.A0_normalization_factor)
                if not 'poles' in data.keys():
                    data['poles'] = []
                if not blockette.transfer_function_types in set(['A','B']):
                    #Only Laplace-transform responses (types A and B) are handled
                    msg = 'Only supporting Laplace transform response ' + \
                        'type. Skipping other response information.'
                    warnings.warn(msg, UserWarning)
                    continue
                if blockette.transfer_function_types == 'B':
                    #NOTE(review): both branches assign 1, so schan never
                    #rescales -- type 'B' (Hz) responses get no rad/s
                    #conversion here; confirm whether 2*pi was intended
                    schan = 1
                else:
                    schan = 1
                if debuggetPAZ2:
                    print 'Here are the number of poles: ' + str(blockette.number_of_complex_poles)
                    print 'Here are the number of zeros: ' + str(blockette.number_of_complex_zeros)
                for i in range(blockette.number_of_complex_poles):
                    p = complex(schan*blockette.real_pole[i], schan*blockette.imaginary_pole[i])
                    data['poles'].append(p)
                if not 'zeros' in data.keys():
                    data['zeros'] = []
                for i in range(blockette.number_of_complex_zeros):
                    z = complex(schan*blockette.real_zero[i], schan*blockette.imaginary_zero[i])
                    data['zeros'].append(z)
    return data
def runmulti(slc):
try:
slc = slc.split()
sta = slc[0]
loc = slc[1]
chan = slc[2]
#Here we try to get the database values as well as compute the values
dbres1, dbres2, dbres3, dbres4 = querydatabase(sta, chan, loc, dateval)
ppsdvalue1, ppsdvalue2, ppsdvalue3, ppsdvalue4 =computePSD(sp,net,sta,loc,chan,dateval)
#Now we write the results
f = open(net + 'results.csv','a')
f.write(sta + ',' + loc + ',' + chan + ',' + str(dateval.year) + ',' + str(dateval.julday).zfill(3) + ',' + \
str(dbres1) + ',' + str(ppsdvalue1) + ',' + \
str(dbres2) + ',' + str(ppsdvalue2) + ',' + \
str(dbres3) + ',' + str(ppsdvalue3) + ',' + \
str(dbres4) + ',' + str(ppsdvalue4) + '\n')
f.close()
except:
print 'Problem with: ' + sta + ' ' + str(dateval.year) + \
' ' + str(dateval.julday).zfill(3)
return
print 'Scan started on', strftime('%Y-%m-%d %H:%M:%S', gmtime()), 'UTC'
if __name__ == '__main__':
#Main function
debug = True
#Here is the network we read the dataless from
net ='US'
datalessloc = '/APPS/metadata/SEED/'
try:
sp = Parser(datalessloc + net + '.dataless')
except:
print 'Can not read the dataless'
exit(0)
#The start day we are going to get power values from
datevalstart = UTCDateTime('2014-150T00:00:00.0')
#How many days we plan to run through
daystogo = 30
for ind in range(daystogo):
dateval = datevalstart + ind*24*60*60
#Get station list with location and channels for the current day
stationList = getstations(sp,dateval)
pool = Pool()
#Run the function for that day
pool.map(runmulti,stationList)
print 'Scan terminated on', strftime('%Y-%m-%d %H:%M:%S', gmtime()), 'UTC'