Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment — the enclosing `def` is outside this view; the names
# `func`, `segment`, `config`, `self`, `inventory` and `invcache` are bound by
# the surrounding (unseen) scope.
try:
    return func(segment, config)
finally:
    # set back values if needed, even if we had exceptions.
    # Any of these values might be also an exception. Call the
    # 'private' attribute cause the relative method, if exists, most likely raises
    # the exception, it does not return it
    sn_windows = self.data.get('sn_windows', None)
    if sn_windows is None:
        # Signal/noise windows not cached yet: compute and cache them now.
        try:
            # sn_split needs exactly one trace; more than one means the
            # segment's stream has gaps/overlaps and windows are undefined.
            if len(segment.stream()) != 1:
                raise ValueError(("Unable to get sn-windows: %d traces in stream "
                                  "(possible gaps/overlaps)") % len(segment.stream()))
            wndw = config['sn_windows']
            arrival_time = \
                UTCDateTime(segment.arrival_time) + wndw['arrival_time_shift']
            self.data['sn_windows'] = sn_split(segment.stream()[0],
                                               arrival_time,
                                               wndw['signal_window'],
                                               return_windows=True)
            # self.data['sn_windows'] = segment.sn_windows(wndw['signal_window'],
            #                                              wndw['arrival_time_shift'])
        except Exception as exc:
            # Cache the exception object itself (not raised here) so later
            # readers can detect/report the failure — see comment above.
            self.data['sn_windows'] = exc
    if inventory is None:
        invcache[segment] = segment.station._inventory  # might be exc, or None
    # reset segment stream to None, for safety: we do not know if it refers
    # to a pre-processed stream or not, and thus segment._stream needs to be set from
    # self.data each time we are here. Note that this should not be a problem as
    # the web app re-initializes the session each time (thus each segment SHOULD have
    # no _stream attribute), but for safety we remove it:
@staticmethod
def utcnow():
    """Return the current date and time as an obspy ``UTCDateTime``."""
    return UTCDateTime()
# NOTE(review): Python 2 fragment (`print` statement at the bottom); it is cut
# mid-`if` at the end, and relies on `st`, `np` and the `NV` namespace bound by
# the surrounding (unseen) scope.
# Normalize all traces and throw out traces with no data.
try:
    max_diff = max([trace.data.max() - trace.data.min() for trace in st \
                    if len(trace.data) > 0])
except:
    # NOTE(review): bare except + pass: if every trace is empty (max() on an
    # empty sequence raises), `max_diff` stays unbound and the division below
    # would raise NameError — TODO confirm intended behavior.
    pass
for trace in st:
    # Fully-masked or empty traces are emptied; the rest are demeaned and
    # scaled by half the largest peak-to-peak amplitude seen above.
    if (np.ma.is_masked(trace.data) and not False in trace.data._mask) or \
            len(trace.data) == 0:
        trace.data = np.array([])
    else:
        trace.data = trace.data - trace.data.mean()
        trace.data = trace.data / (max_diff / 2)
NV.stream = st
# Get the min. starttime and the max. endtime.
starttime = UTCDateTime(NV.starttime.get())
endtime = UTCDateTime(NV.endtime.get())
for trace in NV.stream:
    if np.ma.is_masked(trace):
        # NOTE(review): chained assignment writes NaN into the masked samples;
        # rebinding the loop variable `trace` here has no further effect.
        trace = trace.data[trace._mask] = np.NaN
# Loop over all traces again and fill with NaNs.
for trace in NV.stream:
    # Number of missing samples before/after this trace relative to the
    # requested window, in samples (rounded).
    startgaps = int(round((trace.stats.starttime - starttime) * \
                          trace.stats.sampling_rate))
    endgaps = int(round((endtime - trace.stats.endtime) * \
                        trace.stats.sampling_rate))
    # NOTE(review): leftover debug output (Python 2 print statement).
    print endgaps
    if startgaps or endgaps:
        if startgaps > 0:
            start = np.empty(startgaps)
            start[:] = np.NaN
# NOTE(review): Python 2 fragment (`iteritems`); the `else:` pairs with an
# `if` outside this view, and `kwargs`, `query`, `session`, `WaveformChannel`
# and `UTCDateTime` are bound by the surrounding (unseen) scope.
else:
    # Apply one SQL filter per keyword argument; shell-style wildcards
    # '?'/'*' are translated to the SQL LIKE wildcards '_'/'%'.
    for key, value in kwargs.iteritems():
        # NOTE(review): `value == None` should be `value is None` (PEP 8).
        if value == None:
            continue
        col = getattr(WaveformChannel, key)
        if '*' in value or '?' in value:
            value = value.replace('?', '_')
            value = value.replace('*', '%')
            query = query.filter(col.like(value))
        else:
            query = query.filter(col == value)
results = query.all()
session.close()
# Build a dict keyed by the dotted SEED-style id of the first four result
# columns, mapping to the fifth column parsed as a UTCDateTime.
adict = {}
for result in results:
    key = '%s.%s.%s.%s' % (result[0], result[1], result[2], result[3])
    adict[key] = UTCDateTime(result[4])
return adict
def changeTime(*args, **kwargs):
    """
    Change the times of the plot.

    ``args[0]`` is expected to be a GUI (Tkinter-style) event whose
    ``widget`` is the pressed button; the button label selects the action:

    * ``'-1 h'``, ``'-10 min'``, ``'+10 min'``, ``'+1 h'`` — shift both the
      start and end time by the corresponding number of seconds.
    * ``'Current'`` — reset the window to the last 10 minutes up to now.

    Side effects: updates ``NV.starttime``/``NV.endtime`` (ISO-formatted
    strings) and reloads the data via ``getWaveform()``.

    :raises KeyError: if the button label is not a known action.
    :raises ValueError: if a ``timedict`` entry is neither an int offset
        nor the ``'NOW'`` sentinel (programming error).
    """
    timedict = {'-1 h' :-60 * 60, '-10 min' :-10 * 60,
                'Current': 'NOW',
                '+10 min': 10 * 60,
                '+1 h': 60 * 60}
    timechange = timedict[args[0].widget.cget("text")]
    if isinstance(timechange, int):
        # Relative shift: move the whole window by `timechange` seconds.
        start = UTCDateTime(NV.starttime.get()) + timechange
        end = UTCDateTime(NV.endtime.get()) + timechange
    elif timechange == 'NOW':
        # Absolute reset: show the most recent 10 minutes.
        end = UTCDateTime()
        start = UTCDateTime() - 10 * 60
    else:
        # Previously dropped into pdb here, leaving a debugger trap in
        # production and `start`/`end` unbound afterwards; an unknown
        # entry is a programming error, so fail loudly instead.
        raise ValueError("Unknown time change value: %r" % (timechange,))
    NV.starttime.set(start.strftime('%Y-%m-%dT%H:%M:%S'))
    NV.endtime.set(end.strftime('%Y-%m-%dT%H:%M:%S'))
    getWaveform()
# NOTE(review): fragment — cut mid-`logging.info(...)` call at the end;
# `base_path`, `opdict`, `get_interpolated_time_grids`, `runtime` and `time`
# are bound by the surrounding (unseen) scope.
# grids
search_grid_filename = os.path.join(base_path, 'lib',
                                    opdict['search_grid'])
time_grids = get_interpolated_time_grids(opdict)
# start and end times
starttime = opdict['starttime']
endtime = opdict['endtime']
data_length = opdict['data_length']      # length of each processed span (seconds)
data_overlap = opdict['data_overlap']    # overlap between consecutive spans (seconds)
initial_start_time = utcdatetime.UTCDateTime(starttime)
initial_end_time = initial_start_time+data_length
final_end_time = utcdatetime.UTCDateTime(endtime)
# Each iteration advances by (length - overlap) so consecutive windows overlap.
time_shift_secs = data_length-data_overlap
######### FOR EACH TIME SPAN - DO MIGRATION #############
# start loop over time
start_time = initial_start_time
end_time = initial_end_time
if runtime:
    # Reference wall-clock time used for runtime measurement.
    t_ref = time()
while (start_time < final_end_time):
    # read data
    logging.info("Reading data : %s - %s." % (start_time.isoformat(),
        >>> t2 = UTCDateTime(123.000000099)
        >>> t1 >= t2
        True

        But the actual timestamps differ

        >>> t1.timestamp >= t2.timestamp
        False

        Resetting the precision changes the behavior of the operator

        >>> t1.precision = 11
        >>> t1 >= t2
        False
        """
        # NOTE(review): fragment — the `def` line and the docstring opening are
        # outside this view; this is the body of a ">=" rich comparison.
        # Comparisons are rounded to `self.__precision` decimal places so
        # sub-precision differences compare as equal (see doctest above).
        if isinstance(other, UTCDateTime):
            # Compare via nanosecond counters, converted to seconds.
            return round((self._ns - other._ns) / 1e9, self.__precision) >= 0
        elif isinstance(other, float) or isinstance(other, int):
            return round(self.timestamp - float(other), self.__precision) >= 0
        elif isinstance(other, datetime.datetime):
            # Fall back to exact datetime comparison (no precision rounding).
            return self.datetime >= other
        return False
# Compute a kurtogram over a waveform read between two time limits.
# Fix: `optparse` is used below but was missing from the import block.
import logging
import optparse

from obspy.core import read, utcdatetime, trace

from OP_waveforms import Waveform
from kurtogram import Fast_Kurtogram

logging.basicConfig(level=logging.INFO, format='%(levelname)s : %(asctime)s : %(message)s')

# Command-line interface: data/output filenames, time window, verbosity.
p = optparse.OptionParser()
p.add_option('--data_file', '-d', action='store', help="data filename")
p.add_option('--output_file', '-o', action='store', help="output filename")
p.add_option('--starttime', action='store', help="start time for data e.g. 2010-10-14T00:00:00.0Z")
p.add_option('--endtime', action='store', help="end time for data e.g. 2010-10-14T10:00:00.0Z")
p.add_option('--verbose', '-v', action='store_true', help='print debugging information to stout')
(options, arguments) = p.parse_args()

# Parse the requested time window (raises if the strings are malformed).
tdeb = utcdatetime.UTCDateTime(options.starttime)
tfin = utcdatetime.UTCDateTime(options.endtime)

if options.verbose:
    logging.info('\n\
Input filename = %s\n\
Output basename = %s\n\
Start time = %s\n\
End time = %s\n' % (options.data_file, options.output_file, options.starttime, options.endtime))

# read waveform between time limits
wf = Waveform()
wf.read_from_file(options.data_file, starttime=tdeb, endtime=tfin)
dt = wf.delta        # sample spacing (seconds)
x = wf.values        # raw sample values
print(wf.stream)
# NOTE(review): fragment — the first line is the tail of a dict literal whose
# opening is outside this view; `kwargs`, `query`, `session`, `starttime`,
# `endtime` and `WaveformChannel` are bound by the surrounding (unseen) scope.
    'location': location, 'channel': channel}
# Apply one SQL filter per keyword argument; shell-style wildcards '?'/'*'
# are translated to the SQL LIKE wildcards '_'/'%'.
for key, value in kwargs.items():
    if value is None:
        continue
    col = getattr(WaveformChannel, key)
    if '*' in value or '?' in value:
        value = value.replace('?', '_')
        value = value.replace('*', '%')
        query = query.filter(col.like(value))
    else:
        query = query.filter(col == value)
# start and end time
try:
    starttime = UTCDateTime(starttime)
except Exception:
    # Fall back to "20 minutes ago" when the given start time is unparsable.
    starttime = UTCDateTime() - 60 * 20
finally:
    query = query.filter(WaveformChannel.endtime > starttime.datetime)
try:
    endtime = UTCDateTime(endtime)
except Exception:
    # 10 minutes
    # NOTE(review): comment says "10 minutes" but the fallback is "now" —
    # TODO confirm which is intended.
    endtime = UTCDateTime()
finally:
    query = query.filter(WaveformChannel.starttime < endtime.datetime)
results = query.all()
session.close()
# execute query
# Build a mapping from the dotted SEED-style id (columns 2-5) to file paths
# joined from the first two result columns.
file_dict = {}
for result in results:
    fname = os.path.join(result[0], result[1])
    key = '%s.%s.%s.%s' % (result[2], result[3], result[4], result[5])
# NOTE(review): fragment — tail of a parser method; `event`, `description`,
# `res_id_prefix`, `evid`, `source_code`, `date`, `time`, coordinate and
# quality variables are all bound earlier in the (unseen) method body.
event.event_descriptions.append(description)
# Second description: the numeric Flinn-Engdahl region code.
description = EventDescription(
    type='Flinn-Engdahl region',
    text=fe_region_number)
event.event_descriptions.append(description)
origin = Origin()
res_id = '/'.join((res_id_prefix, 'origin', evid))
origin.resource_id = ResourceIdentifier(id=res_id)
origin.creation_info = CreationInfo()
if source_code:
    origin.creation_info.agency_id = source_code
else:
    # Default agency when the record carries no source code.
    origin.creation_info.agency_id = 'USGS-NEIC'
res_id = '/'.join((res_id_prefix, 'earthmodel/ak135'))
origin.earth_model_id = ResourceIdentifier(id=res_id)
origin.time = UTCDateTime(date + time)
# Latitude/longitude signs derived from the hemisphere letters (N/S, E/W).
origin.latitude = latitude * self._coordinate_sign(lat_type)
origin.longitude = longitude * self._coordinate_sign(lon_type)
# Depth scaled by 1000 — presumably km -> m (QuakeML convention); confirm
# against the input format specification.
origin.depth = depth * 1000
origin.depth_type = 'from location'
origin.quality = OriginQuality()
origin.quality.associated_station_count = station_number
origin.quality.standard_error = standard_dev
# associated_phase_count can be incremented in records 'P ' and 'S '
origin.quality.associated_phase_count = 0
# depth_phase_count can be incremented in record 'S '
origin.quality.depth_phase_count = 0
origin.type = 'hypocenter'
origin.region = fe_region_name
event.origins.append(origin)
return event