                params=None):
    maxlag = params.maxlag
    cc_sampling_rate = params.goal_sampling_rate
    sac_format = params.sac_format
    if sac_format is None:
        sac_format = get_config(db, "sac_format")
    if maxlag is None:
        maxlag = float(get_config(db, "maxlag"))
    if cc_sampling_rate is None:
        cc_sampling_rate = float(get_config(db, "cc_sampling_rate"))
    try:
        os.makedirs(os.path.split(filename)[0])
    except OSError:
        # the output directory may already exist
        pass
    filename += ".SAC"
    mytrace = Trace(data=corr)
    mytrace.stats['station'] = pair
    mytrace.stats['sampling_rate'] = cc_sampling_rate
    if maxlag:
        mytrace.stats.starttime = -maxlag
    mytrace.stats.sac = AttribDict()
    mytrace.stats.sac.depmin = np.min(corr)
    mytrace.stats.sac.depmax = np.max(corr)
    mytrace.stats.sac.depmen = np.mean(corr)
    mytrace.stats.sac.scale = 1
    mytrace.stats.sac.npts = len(corr)
    st = Stream(traces=[mytrace, ])
    st.write(filename, format='SAC')
    del st
    return
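
# Example (sketch): building and writing a cross-correlation trace to SAC with
# plain ObsPy, mirroring the snippet above. The pair label, lag and sampling
# rate are illustrative values, not values read from the project database.
import numpy as np
from obspy import Trace, Stream
from obspy.core import AttribDict

maxlag = 120.0                                  # seconds of lag on each side
corr = np.random.randn(int(2 * maxlag * 20) + 1).astype(np.float32)
mytrace = Trace(data=corr)
mytrace.stats.station = "PAIR01"                # hypothetical station-pair label
mytrace.stats.sampling_rate = 20.0              # cross-correlation sampling rate
mytrace.stats.starttime = -maxlag               # lag encoded as seconds before epoch
mytrace.stats.sac = AttribDict()
mytrace.stats.sac.depmin = float(corr.min())
mytrace.stats.sac.depmax = float(corr.max())
Stream([mytrace]).write("PAIR01.SAC", format="SAC")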
    .. rubric:: Example

    >>> from obspy.core import read  # doctest: +SKIP
    >>> st = read("/path/to/test.sac")  # doctest: +SKIP
    """
    # read SAC file
    t = SacIO()
    if headonly:
        t.ReadSacHeader(filename)
    else:
        t.ReadSacFile(filename)
    # assign all header entries to a new dictionary compatible with an ObsPy
    # Stats object
    header = t.get_obspy_header()

    if headonly:
        tr = Trace(header=header)
    else:
        tr = Trace(header=header, data=t.seis)
    return Stream([tr])
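
# Usage sketch for the SAC reader above; the path is the doctest placeholder.
# headonly=True fills the Stats from the SAC header without loading samples.
from obspy import read

st = read("/path/to/test.sac", format="SAC", headonly=True)
tr = st[0]
print(tr.stats.npts, tr.stats.sampling_rate)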
            if key in SH_KEYS_INT:
                header['sh'][key] = int(value)
            elif key in SH_KEYS_FLOAT:
                header['sh'][key] = float(value)
            else:
                header['sh'][key] = value
        # set channel code
        header['channel'] = ''.join(channel)
        # remember record number
        header['sh']['RECNO'] = len(stream) + 1
        if headonly:
            # skip data
            stream.append(Trace(header=header))
        else:
            if not npts:
                stream.append(Trace(header=header))
                continue
            # read data
            data = fh_data.read(npts * 4)
            dtype = byteorder + 'f4'
            data = np.fromstring(data, dtype=dtype)
            # convert to system byte order
            data = np.require(data, '=f4')
            stream.append(Trace(data=data, header=header))
    if not headonly:
        fh_data.close()
    return stream
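
# Usage sketch for the Seismic Handler Q reader above; the file path is a
# placeholder. RECNO is the per-record number assigned in the loop above.
from obspy import read

st = read("/path/to/file.QHD", format="Q")
for tr in st:
    print(tr.id, tr.stats.sh['RECNO'])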
    >>> from obspy.core import read  # doctest: +SKIP
    >>> st = read("/path/to/test.sac")  # doctest: +SKIP
    """
    # read SAC file
    t = SacIO(debug_headers=debug_headers)
    if headonly:
        t.ReadSacHeader(filename)
    else:
        t.ReadSacFile(filename)
    # assign all header entries to a new dictionary compatible with an ObsPy
    # Stats object
    header = t.get_obspy_header()

    if headonly:
        tr = Trace(header=header)
    else:
        tr = Trace(header=header, data=t.seis)
    return Stream([tr])
        month = int(temp[17:19])
        day = int(temp[20:22])
        hour = int(temp[23:25])
        mins = int(temp[26:28])
        secs = float(temp[29:35])
        header['starttime'] = UTCDateTime(year, month, day, hour, mins) + secs
        if headonly:
            # skip data
            fh.seek(dlen * (header['npts'] + 2), 1)
            stream.append(Trace(header=header))
        else:
            # fetch data
            data = np.fromfile(fh, dtype=dtype, count=header['npts'] + 2)
            # convert to system byte order
            data = np.require(data, stype)
            stream.append(Trace(data=data[2:], header=header))
    return stream
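
# The reader above combines the integer date/time fields with the fractional
# seconds. For example, for the timestamp 2005-12-19T15:07:54:
from obspy import UTCDateTime

starttime = UTCDateTime(2005, 12, 19, 15, 7) + 54.0
print(starttime)   # 2005-12-19T15:07:54.000000Z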
    ... | 2005-12-19T15:07:54.000000Z - ... | 4000.0 Hz, 8000 samples
    """
    # Read file to the internal segy representation.
    su_object = readSUFile(filename, endian=byteorder,
                           unpack_headers=unpack_trace_headers)
    # Create the stream object.
    stream = Stream()
    # Get the endianness from the first trace.
    endian = su_object.traces[0].endian
    # Loop over all traces.
    for tr in su_object.traces:
        # Create a new Trace object for every SU trace and append it to the
        # Stream object.
        trace = Trace()
        stream.append(trace)
        # skip data if headonly is set
        if headonly:
            trace.stats.npts = tr.npts
        else:
            trace.data = tr.data
        trace.stats.su = AttribDict()
        # If all values will be unpacked, create a normal dictionary.
        if unpack_trace_headers:
            # Add the trace header as a new attrib dictionary.
            header = AttribDict()
            for key, value in tr.header.__dict__.items():
                setattr(header, key, value)
        # Otherwise use the LazyTraceHeaderAttribDict.
        else:
            # Add the trace header as a new lazy attrib dictionary.
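
# Usage sketch for the SU reader above; the path is a placeholder and
# unpack_trace_headers=True eagerly unpacks all trace header values into
# trace.stats.su (otherwise they are unpacked lazily on access).
from obspy import read

st = read("/path/to/file.su", format="SU", unpack_trace_headers=True)
tr = st[0]
print(tr.stats.sampling_rate, tr.stats.npts)
print(tr.stats.su.trace_header)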
import logging

import matplotlib.pyplot as plt
import numpy as np
import scipy.integrate
import scipy.signal
from obspy.core import Trace as ObsPyTrace  # base class of the Trace subclass below
# from mystream import read

log = logging.getLogger(__name__)
# statshf = 'sampling_rate delta calib npts network location station channel starttime'
# statshf_r = statshf + ' endtime'
# shhf_int = 'SIGN EVENTNO MARK'
# shhf_float = 'DISTANCE AZIMUTH SLOWNESS INCI DEPTH MAGNITUDE LAT LON SIGNOISE PWDW DCVREG DCVINCI'
# shhf_str = 'COMMENT OPINFO FILTER QUALITY BYTEORDER P-ONSET S-ONSET ORIGIN'
# shhf = ' '.join([shhf_int, shhf_float, shhf_str])
class Trace(ObsPyTrace):
    """
    Class derived from obspy.core.Trace with some additional functionality.
    """
#    @classmethod
#    def read(cls, pathname_or_url, format=None, headonly=False, **kwargs):
#        """Read the first trace of a waveform file into a Trace object.
#
#        If there is more than one trace, try to glue them together.
#        See obspy.core.read.
#        """
#
#        mystream = read(pathname_or_url, format=None, headonly=False,
#                        **kwargs)
#        mytrace = mystream[0]
#        if len(mystream) > 1:
    endian = segy_object.traces[0].endian
    textual_file_header_encoding = segy_object.textual_header_encoding.upper()
    # Add the file wide headers.
    stream.stats.textual_file_header = textual_file_header
    stream.stats.binary_file_header = binary_file_header
    # Also set the data encoding, endianness and the encoding of the
    # textual_file_header.
    stream.stats.data_encoding = data_encoding
    stream.stats.endian = endian
    stream.stats.textual_file_header_encoding = \
        textual_file_header_encoding
    # Loop over all traces.
    for tr in segy_object.traces:
        # Create a new Trace object for every SEG Y trace and append it to
        # the Stream object.
        trace = Trace()
        stream.append(trace)
        # skip data if headonly is set
        if headonly:
            trace.stats.npts = tr.npts
        else:
            trace.data = tr.data
        trace.stats.segy = AttribDict()
        # If all values will be unpacked, create a normal dictionary.
        if unpack_trace_headers:
            # Add the trace header as a new attrib dictionary.
            header = AttribDict()
            for key, value in tr.header.__dict__.items():
                setattr(header, key, value)
        # Otherwise use the LazyTraceHeaderAttribDict.
        else:
            # Add the trace header as a new lazy attrib dictionary.
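
# Usage sketch for the SEG Y reader above; the path is a placeholder. The
# file-wide headers set in the snippet are available on the returned Stream.
from obspy import read

st = read("/path/to/file.segy", format="SEGY", headonly=True)
print(st.stats.textual_file_header[:80])
print(st.stats.endian, st.stats.data_encoding)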
            stats[key] = val = val.decode('utf-8')
            if _is_utc(val):
                stats[key] = UTC(val)
            elif key == 'processing':
                # this block is only necessary for files written with old
                # obspyh5 versions (< 0.5.0)
                stats[key] = json.loads(val)
    jsondata = stats.pop('_json', None)
    if jsondata is not None:
        for k, v in json.loads(jsondata).items():
            stats[k] = v
    if headonly:
        stats['npts'] = len(dataset)
        trace = Trace(header=stats)
    else:
        trace = Trace(data=dataset[...], header=stats)
    return trace
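
# Round-trip sketch for the obspyh5 reader above; the file name is a
# placeholder. Importing obspyh5 registers the "H5" format with ObsPy.
import obspyh5  # noqa: F401
from obspy import read

st = read()                                   # ObsPy example stream
st.write("waveforms.h5", format="H5")
st2 = read("waveforms.h5", format="H5", headonly=True)
print(st2[0].stats.npts)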