def test_duplicate_picks_ok_if_rejected(self, cat1):
    """
    Rejected picks should not count as duplicates.
    """
    cat = cat1.copy()
    # get the first non-rejected pick
    for pick in cat1[0].picks:
        if pick.evaluation_status != "rejected":
            pick = pick.copy()
            break
    else:
        raise ValueError("all picks rejected")
    pick.resource_id = obspy.core.event.ResourceIdentifier(referred_object=pick)
    pick.evaluation_status = "rejected"
    cat[0].picks.append(pick)
    # this should not raise
    validate_catalog(cat)
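
The ResourceIdentifier(referred_object=pick) call above attaches a fresh id that resolves back to the pick it identifies. A minimal, hypothetical illustration of that pattern (not part of the test suite above):

import obspy
import obspy.core.event as ev

# Hypothetical example: a rejected pick whose new resource_id resolves
# back to the pick object itself.
pick = ev.Pick(time=obspy.UTCDateTime(0), evaluation_status="rejected")
pick.resource_id = ev.ResourceIdentifier(referred_object=pick)
assert pick.resource_id.get_referred_object() is pick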
def readArc(afile):
    with open(afile, "r") as myfile:
        arc = myfile.read().replace('$', '')
    arc = arc.split('\n')
    oTime = _arcOrTime(arc[0][0:16])
    Time = obspy.core.UTCDateTime(oTime).formatIRISWebService()
    lat, lon = _arcLatLong(arc[0][16:23], arc[0][23:31])
    depth = _arcDivide100(arc[0].split()[3])
    mag = _arcMag(arc[0][70:73])
    herr = _arcDivide100(arc[0][85:89])
    verr = _arcDivide100(arc[0][89:93])
    return [Time, oTime.timestamp, lat, lon, depth, mag, herr, verr]
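
readArc leans on obspy.core.UTCDateTime for the time conversion; a minimal sketch of just that conversion (the time value here is an arbitrary example, and the _arc* helper parsers above are not reproduced):

import obspy

# Arbitrary example time; formatIRISWebService() gives the ISO-style string
# readArc stores, and .timestamp the POSIX seconds it also returns.
oTime = obspy.core.UTCDateTime("2014-07-01T08:23:45.67")
print(oTime.formatIRISWebService())   # ISO-style string, e.g. 2014-07-01T08:23:45.67...
print(oTime.timestamp)                # POSIX seconds: 1404203025.67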
@param params
    seismon params dictionary
@param segment
    [start,end] gps
"""

ifo = pylal.pylal_seismon_utils.getIfo(params)

gpsStart = segment[0]
gpsEnd = segment[1]

# set the times
duration = np.ceil(gpsEnd - gpsStart)

st = obspy.core.Stream()
tstart = pylal.pylal_seismon_utils.GPSToUTCDateTime(gpsStart)
tend = pylal.pylal_seismon_utils.GPSToUTCDateTime(gpsEnd)

for channel in params["channels"]:
    # make timeseries
    dataFull = pylal.pylal_seismon_utils.retrieve_timeseries(params, channel, segment)
    if dataFull == []:
        continue

    # calibrate, fill NaN samples with the mean of the valid data, then demean
    dataFull = dataFull / channel.calibration
    indexes = np.where(np.isnan(dataFull.data))[0]
    meanSamples = np.mean(np.ma.masked_array(dataFull.data, np.isnan(dataFull.data)))
    for index in indexes:
        dataFull[index] = meanSamples
    dataFull -= np.mean(dataFull.data)
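
The loop above patches NaN gaps with the mean of the valid samples before demeaning. A self-contained sketch of the same idea in plain numpy (the array here is a stand-in for dataFull.data):

import numpy as np

# Stand-in for dataFull.data: a short trace with two NaN gaps.
data = np.array([1.0, 2.0, np.nan, 4.0, np.nan, 6.0])

# Mean of the valid samples, computed with a masked array as above.
mean_valid = np.mean(np.ma.masked_array(data, np.isnan(data)))
data[np.isnan(data)] = mean_valid

# Remove the overall mean, as the snippet does after gap-filling.
data -= np.mean(data)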
def writeEndLine(m, Key):
    mintime = obspy.core.UTCDateTime(np.min(Key.Ppick.values))
    space = ' '
    hhmmssss = mintime.formatIRISWebService().replace('-', '').replace('T', '').replace(':', '').replace('.', '')[8:16]
    print(hhmmssss)
    print(obspy.core.UTCDateTime(mintime))
    lat, latminute = str(abs(int(m[0]))), str(abs(60 * (m[0] - int(m[0])))).replace('.', '')[0:4]
    lon, lonminute = str(abs(int(m[1]))), str(abs(60 * (m[1] - int(m[1])))).replace('.', '')[0:4]
    trialdepth = ' 400'
    endline = "{:<6}{:<8}{:<3}{:<4}{:<4}{:<4}{:<5}\n".format(space, hhmmssss, lat, latminute, lon, lonminute, trialdepth)
    return endline
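
The hhmmssss slice above compacts an IRIS-style timestamp into hour/minute/second/centisecond digits. A quick hypothetical check of that slicing on an arbitrary time:

import obspy

t = obspy.UTCDateTime("2014-07-01T08:23:45.67")
flat = t.formatIRISWebService().replace('-', '').replace('T', '').replace(':', '').replace('.', '')
print(flat[8:16])   # '08234567' -> the HHMMSSss field written into the end line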
Trace.sort()
if self.filt is not None:
    if self.decimate:
        # decimate so the sampling rate is roughly 2.5 times the high corner frequency
        Trace.decimate(self.decimate)
    # Trace[0].data = self._fftprep(Trace[0].data)
    Trace.filter('bandpass', freqmin=self.filt[0], freqmax=self.filt[1],
                 corners=self.filt[2], zerophase=self.filt[3])
if trimtime is not None:
    deb(trimtime)
    if self.trimSeconds is None:
        Trace = Trace.slice(starttime=obspy.core.UTCDateTime(trimtime[0]),
                            endtime=obspy.core.UTCDateTime(trimtime[1]))
    else:
        Trace = Trace.slice(starttime=obspy.core.UTCDateTime(trimtime[0]),
                            endtime=obspy.core.UTCDateTime(trimtime[0]) + self.trimSeconds)
Trace.sort()
Trace.detrend()
return Trace
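
The same filter/slice/detrend chain can be exercised on obspy's bundled example stream; a minimal sketch with arbitrary corner frequencies (the filt, decimate and trimSeconds settings above are instance attributes not shown here):

import obspy

st = obspy.read()                       # obspy's built-in example Stream
st.filter('bandpass', freqmin=1.0, freqmax=10.0, corners=4, zerophase=True)
t0 = st[0].stats.starttime
st = st.slice(starttime=t0, endtime=t0 + 10)   # keep the first 10 s
st.sort()
st.detrend()
print(st)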
def _parse_event_and_station(self):
    """
    Parse the event and station information.
    """
    # Parse the event.
    if self.event and not isinstance(self.event, Event):
        # It might be an ObsPy event catalog.
        if isinstance(self.event, obspy.core.event.Catalog):
            if len(self.event) != 1:
                raise PyflexError("The event catalog must contain "
                                  "exactly one event.")
            self.event = self.event[0]
        # It might be an ObsPy event object.
        if isinstance(self.event, obspy.core.event.Event):
            if not self.event.origins:
                raise PyflexError("Event does not contain an origin.")
            origin = self.event.preferred_origin() or self.event.origins[0]
            self.event = Event(latitude=float(origin.latitude),
                               longitude=float(origin.longitude),
                               depth_in_m=float(origin.depth),
                               origin_time=origin.time)
        else:
            raise PyflexError("Could not parse the event. Unknown type.")
def _getRA(self, ftc, evedf, sta):
    CorDF = pd.DataFrame(index=evedf.Event.values,
                         columns=['Xcor', 'STALTA', 'TimeStamp', 'SampRate',
                                  'MaxCC', 'MaxSTALTA', 'threshold', 'Nc'])
    # CorDF['FilesToCorr'] = FilesToCorr
    conStream = self._applyFilter(obspy.core.read(ftc), condat=True)
    if not isinstance(conStream, obspy.core.stream.Stream):
        return None, None
    CorDF['Nc'] = len(list(set([x.stats.channel for x in conStream])))
    CorDF['SampRate'] = conStream[0].stats.sampling_rate
    MPcon, TR = self.multiplex(conStream, evedf.Nc.median(), retTR=True)
    CorDF['TimeStamp'] = min([x.stats.starttime.timestamp for x in TR])
    # get continuous data parameters for Xcor
    MPconFD = scipy.fftpack.fft(MPcon, n=2**int(evedf.reqlen.median()).bit_length())
    n = int(np.median([len(x) for x in evedf.MPtem]))  # TODO: assumes all templates have equal length; breaks otherwise
    a = pd.rolling_mean(MPcon, n)[n - 1:]
    b = pd.rolling_std(MPcon, n)[n - 1:]
    b *= np.sqrt((n - 1.0) / n)
    for corevent, corrow in CorDF.iterrows():
        evrow = evedf[evedf.Event == corevent].iloc[0]
        CorDF.threshold[corevent] = evrow.threshold
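
pd.rolling_mean and pd.rolling_std above are the old pandas API (removed in pandas 0.23+); a hedged equivalent with the current rolling interface, using stand-in values for MPcon and n:

import numpy as np
import pandas as pd

MPcon = np.random.randn(1000)   # stand-in for the multiplexed continuous data
n = 50                          # stand-in for the template length

a = pd.Series(MPcon).rolling(n).mean().to_numpy()[n - 1:]
b = pd.Series(MPcon).rolling(n).std().to_numpy()[n - 1:]
b *= np.sqrt((n - 1.0) / n)     # sample std -> population std, as in the snippet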
def get_ds(network, station, location, channel, starttime, length):
    from obspy.clients.fdsn import Client
    t0 = obspy.core.UTCDateTime(starttime)
    t1 = t0 + length
    try:
        c = Client(base_url='http://service.iris.edu',
                   user="azevedo@passcal.nmt.edu",
                   password="haL8muerte",
                   timeout=20,
                   debug=DEBUG)
    except Exception as e:
        print(e)
    try:
        stream = None
        stream = c.get_waveforms(network,
                                 station,
                                 location,
                                 channel,
                                 t0, t1)
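
get_ds wraps a standard FDSN waveform request; a minimal sketch of the same request against the public IRIS endpoint, with arbitrary example SEED codes and no credentials:

import obspy
from obspy.clients.fdsn import Client

client = Client("IRIS")                       # anonymous access
t0 = obspy.UTCDateTime("2010-02-27T06:45:00")
st = client.get_waveforms("IU", "ANMO", "00", "LHZ", t0, t0 + 600)
print(st)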