def picks_no_origin(self):
    """Create an event that has picks but no origin."""
    t0 = UTCDateTime("2016-01-01T10:12:15.222")

    def wave_id(seed_str):
        return ev.WaveformStreamID(seed_string=seed_str)

    picks = [
        ev.Pick(time=t0 + 2, waveform_id=wave_id("UU.TMU..HHZ")),
        ev.Pick(time=t0 + 1.2, waveform_id=wave_id("UU.BOB.01.ELZ")),
        ev.Pick(time=t0 + 3.2, waveform_id=wave_id("UU.TEX..EHZ")),
    ]
    return picks_to_dataframe(ev.Event(picks=picks))
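The pick times above rely on UTCDateTime's second-based arithmetic; a minimal standalone sketch of that behaviour:

from obspy import UTCDateTime

t0 = UTCDateTime("2016-01-01T10:12:15.222")
# Adding a number shifts the time by that many seconds ...
t1 = t0 + 1.2
print(t1)        # 2016-01-01T10:12:16.422000Z
# ... and subtracting two UTCDateTime objects returns the offset in seconds.
print(t1 - t0)   # 1.2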
def __copy_inventory_metadata(self, other):
    """
    Will be called after two inventory objects have been merged. It
    attempts to assure that the inventory meta information is somewhat
    correct after the merging.

    The networks in other will have been moved to self.
    """
    # The creation time is naturally adjusted to the current time.
    self.created = obspy.UTCDateTime()

    # Merge the source.
    srcs = [self.source, other.source]
    srcs = [_i for _i in srcs if _i]
    all_srcs = []
    for src in srcs:
        all_srcs.extend(src.split(","))
    if all_srcs:
        src = sorted(list(set(all_srcs)))
        self.source = ",".join(src)
    else:
        self.source = None

    # Do the same with the sender.
    sndrs = [self.sender, other.sender]
    sndrs = [_i for _i in sndrs if _i]
    all_sndrs = []
    for sndr in sndrs:
        all_sndrs.extend(sndr.split(","))
    if all_sndrs:
        sndr = sorted(list(set(all_sndrs)))
        self.sender = ",".join(sndr)
    else:
        self.sender = None
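To illustrate the comma-separated merge applied to both source and sender above, a tiny standalone sketch (example values made up):

def merge_comma_field(a, b):
    # Same dedup-and-sort logic as the source/sender merge above.
    parts = []
    for field in (a, b):
        if field:
            parts.extend(field.split(","))
    return ",".join(sorted(set(parts))) if parts else None

print(merge_comma_field("SED,ETH", "ETH,ORFEUS"))  # ETH,ORFEUS,SED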
import os
import numpy as np
from obspy import read, UTCDateTime

# Inputs
times_dir = '/lfs/1/ceyoon/TimeSeries/ItalyDay/day303/network_detection/'
[det_start_ind, det_end_ind, dL, nevents, nsta, tot_ndets, max_ndets, tot_vol,
 max_vol, peaksum, num_sta, diff_ind] = np.loadtxt(
    times_dir + 'sort_nsta_peaksum_22sta_2stathresh_FinalUniqueNetworkDetectionTimes.txt',
    unpack=True)
out_dir = times_dir+'22sta_2stathresh_NetworkWaveformPlots/'
if not os.path.exists(out_dir):
os.makedirs(out_dir)
# Times
dt_fp = 1.2
det_times = dt_fp * det_start_ind
diff_times = dt_fp * diff_ind
dL_dt = dt_fp * dL
print(len(det_times))
# Window length (seconds) for event plot
init_time = UTCDateTime('2016-10-29T00:00:06.840000') # global start time for all channels
wtime_before = 10
wtime_after = 50
# Plot dimensions
out_width = 800
out_height = 2000
# Read in data and plot
ts_dir = '/lfs/1/ceyoon/TimeSeries/ItalyDay/day303/'
st = read(ts_dir+'Deci4.bp2to20.YR*HHZ*')
print(len(st))
print(st.__str__(extended=True))
# IND_FIRST and IND_LAST (detection index range) are defined elsewhere in the original script.
for kk in range(IND_FIRST, IND_LAST):
    ev_time = init_time + det_times[kk]
    start_time = ev_time - wtime_before
    end_time = ev_time + wtime_after
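    # Hypothetical continuation (not part of the original snippet): slice the
    # day-long stream to the detection window and save one plot per event,
    # using the output directory and plot dimensions defined above.
    st_slice = st.slice(start_time, end_time)
    st_slice.plot(outfile=out_dir + 'event_%06d.png' % kk,
                  size=(out_width, out_height), equal_scale=False)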
from __future__ import print_function
from math import log10
import numpy as np
from obspy.clients.arclink import Client
from obspy import Stream, UTCDateTime
from obspy.geodetics import gps2dist_azimuth
from obspy.signal.trigger import coincidence_trigger
client = Client(user="sed-workshop@obspy.org")
t = UTCDateTime("2012-04-03T01:00:00")
t2 = t + 4 * 3600
stations = ["AIGLE", "SENIN", "DIX", "LAUCH", "MMK", "SIMPL"]
st = Stream()
for station in stations:
    try:
        tmp = client.get_waveforms("CH", station, "", "[EH]HZ", t, t2,
                                   metadata=True)
    except Exception:
        print(station, "---")
        continue
    st += tmp
st.taper(max_percentage=0.05)  # recent ObsPy requires max_percentage; 5% is an assumed value
st.filter("bandpass", freqmin=1, freqmax=20)
def plot_event(idx, ts):
    min_ts, max_ts = find_min_max(ts)
    segment = st.slice(UTCDateTime(min_ts) - 16, UTCDateTime(max_ts) + 16)
    segment.plot(outfile='plots/new_%d.png' % idx, equal_scale=False,
                 size=(1000, 600))
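find_min_max and ts come from elsewhere in the original project; a plausible stand-in, assuming ts is an iterable of POSIX timestamps:

def find_min_max(ts):
    # Hypothetical helper: return the earliest and latest timestamp.
    return min(ts), max(ts)

# Hypothetical usage with made-up detection timestamps (seconds since epoch);
# requires the stream ``st`` from above and an existing plots/ directory.
plot_event(0, [1477699206.84, 1477699212.10, 1477699215.32])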
import numpy as np
import obspy
from obspy import UTCDateTime, read
import datetime
from obspy.clients.fdsn import Client
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
# Set the FDSN data center to query (IRIS).
client = Client("IRIS")
# Get the current UTC date and use it as the end of the searched time window.
date_end = datetime.datetime.utcnow().date()
# Convert the end date from a datetime object to a UTCDateTime object.
end = UTCDateTime(date_end)
# Set the time period to scan; here, the previous 1000 days.
no_of_days = 1000.0
# Define the time difference as a timedelta object.
number_days = datetime.timedelta(days=no_of_days)
# Set the start date of the time window as the end date minus the number of days.
date_start = date_end - number_days
# Convert the start date from a datetime object to a UTCDateTime object.
start = UTCDateTime(date_start)
# Set the minimum magnitude threshold to search for.
min_mag = 2.0
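The client, time window, and magnitude threshold above are presumably fed into an FDSN event query next; a minimal sketch of that step (the original continuation is not shown):

# Fetch the catalog of events matching the search window and magnitude.
catalog = client.get_events(starttime=start, endtime=end, minmagnitude=min_mag)
print(catalog)
# ObsPy catalogs can also be previewed directly on a world map.
catalog.plot()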
"left_index", "right_index", "center_index", "channel_id",
"time_of_first_sample", "max_cc_value", "cc_shift_in_samples",
"cc_shift_in_seconds", "dlnA", "dt", "min_period",
"phase_arrivals", "absolute_starttime", "absolute_endtime",
"relative_starttime", "relative_endtime", "window_weight"])
missing_keys = necessary_keys.difference(set(win.keys()))
if missing_keys:
raise ValueError(
"Window JSON file misses the following keys:\n%s" %
", ".join(missing_keys))
new_win = Window(
left=win["left_index"], right=win["right_index"],
center=win["center_index"],
time_of_first_sample=obspy.UTCDateTime(
win["time_of_first_sample"]),
dt=win["dt"], min_period=win["min_period"],
channel_id=win["channel_id"])
new_win.max_cc_value = win["max_cc_value"]
new_win.cc_shift = win["cc_shift_in_samples"]
new_win.dlnA = win["dlnA"]
new_win.phase_arrivals = win["phase_arrivals"]
return new_win
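For reference, the parser above expects each window dictionary to carry the keys listed in necessary_keys; an illustrative example (all values made up):

win = {
    "left_index": 120, "right_index": 380, "center_index": 250,
    "channel_id": "BW.RJOB..EHZ",
    "time_of_first_sample": "2012-04-03T01:00:00.000000Z",
    "max_cc_value": 0.95, "cc_shift_in_samples": 4,
    "cc_shift_in_seconds": 0.04, "dlnA": 0.1, "dt": 0.01,
    "min_period": 10.0, "phase_arrivals": [],
    "absolute_starttime": "2012-04-03T01:00:01.2",
    "absolute_endtime": "2012-04-03T01:00:03.8",
    "relative_starttime": 1.2, "relative_endtime": 3.8,
    "window_weight": 1.0,
}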
    :param stop_time: Time at which the search for data is stopped and
        ``None`` is returned. If not specified, stops at ``1950-01-01T00``.
    :rtype: :class:`~obspy.core.utcdatetime.UTCDateTime` or ``None``
    """
    sds_type = sds_type or self.sds_type
    seed_pattern = ".".join((network, station, location, channel))
    if not self.has_data(
            network=network, station=station, location=location,
            channel=channel, sds_type=sds_type):
        return None
    stop_time = stop_time or UTCDateTime(1950, 1, 1)
    st = None
    time = UTCDateTime()
    while not st:
        if time < stop_time:
            return None
        filename = self._get_filename(
            network=network, station=station, location=location,
            channel=channel, time=time, sds_type=sds_type)
        if os.path.isfile(filename):
            try:
                st = read(filename, format=self.format, headonly=True,
                          sourcename=seed_pattern)
            except ObsPyMSEEDFilesizeTooSmallError:
                # Just ignore small MiniSEED files; when working with
                # near-real-time data these are usually just being created
                # at request time, e.g. when fetching current data right
                # after midnight.
                st = None
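        # Hypothetical continuation (not part of the original fragment): step
        # the search time back by one day per iteration, since SDS archives
        # store one file per channel per day.
        time -= 24 * 3600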
def __init__(self, files, id=None, prefix="smi:local/qc",
             starttime=None, endtime=None,
             add_c_segments=True, add_flags=False,
             waveform_type="seismic"):
    """
    Reads the MiniSEED files and extracts the data quality metrics.
    """
    self.data = Stream()
    self.all_files = files
    self.files = []

    # Allow anything UTCDateTime can parse.
    if starttime is not None:
        starttime = UTCDateTime(starttime)
    if endtime is not None:
        endtime = UTCDateTime(endtime)

    self.window_start = starttime
    self.window_end = endtime

    # We are required to exclude samples at T1. Therefore, shift the
    # time window to the left by 1 μs and set nearest_sample to False.
    # This forces ObsPy to fall back to the sample left of the endtime.
    if endtime is not None:
        endtime_left = endtime - 1e-6
    else:
        endtime_left = None

    # Will raise if a file is not MiniSEED.
    for file in files:
        st = read(file, starttime=starttime, endtime=endtime_left,
                  format="mseed", nearest_sample=False)
@installation_date.setter
def installation_date(self, value):
    if value is None or isinstance(value, UTCDateTime):
        self._installation_date = value
        return
    self._installation_date = UTCDateTime(value)
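The setter above coerces anything UTCDateTime can parse; a self-contained sketch of the same pattern using a hypothetical stand-in class:

from obspy import UTCDateTime

class Sensor(object):
    """Hypothetical stand-in for the class owning the property above."""

    def __init__(self):
        self._installation_date = None

    @property
    def installation_date(self):
        return self._installation_date

    @installation_date.setter
    def installation_date(self, value):
        if value is None or isinstance(value, UTCDateTime):
            self._installation_date = value
            return
        self._installation_date = UTCDateTime(value)

s = Sensor()
s.installation_date = "2015-03-01"   # a plain string is coerced ...
print(type(s.installation_date))     # ... to obspy.core.utcdatetime.UTCDateTime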