self.execute(repeat=self.warmup, unbind=False)
# we want at least this many blocks
block_slots = _get_sm_count()
# loops for given size of N
loopsN = max(self.params[0] // 4, 1)
# don't bother with internal mode for N>4
modes = (0,1) if self.params[0] <= 4 else (1,)
gys = float(self.GYS)
gxs = float(self.GXS)
small_set = gys * gxs <= 512
# TODO: this needs more pruning, it takes too long for large HW
results = []
sys.stdout.write("Autotune " + native_str(self))
progress = 0
for threshold in (True, False):
    for external in modes:
        for strideY in range(1, self.GYS + 1):
            for strideX in range(1, self.GXS + 1):
                if progress % 32 == 0:
                    sys.stdout.write('.')
                    sys.stdout.flush()
                progress += 1
                # CRSK copies in determ mode
                outputs = strideY * strideX
                # minimal occupancy filter
                blocks = self.blocksCK * strideY * strideX
                # gemm loop size filter
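
# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the snippet above: a generic autotuning
# loop in the same spirit as the nested search here -- enumerate candidate
# configurations, time each one, keep the fastest.  `candidate_configs` and
# `run_config` are hypothetical stand-ins for the kernel-specific pruning and
# launch logic used by the real tuner.
import time

def autotune(candidate_configs, run_config, repeat=10):
    best_config, best_time = None, float("inf")
    for config in candidate_configs:
        # warm up once so compilation / caching does not skew the timing
        run_config(config)
        start = time.perf_counter()
        for _ in range(repeat):
            run_config(config)
        elapsed = (time.perf_counter() - start) / repeat
        if elapsed < best_time:
            best_config, best_time = config, elapsed
    return best_config, best_time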
def test_read_from_buffer():
    s = native_str("2\nmy xyz file\n H 1.0 1.0 1.0\n H 1.0 2.0 1.0\n")
    buffer = native_str_buffer(s)
    h2 = mdt.read(buffer, format='xyz')
    assert h2.num_atoms == 2
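
# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the test above: the test feeds an
# in-memory, file-like buffer to the reader instead of a path on disk.  On
# Python 3 `native_str` is just `str`, so `io.StringIO` plays the role of
# `native_str_buffer` here.  `count_xyz_atoms` is a hypothetical minimal
# parser, not mdt.read; it only reads the atom count from the first line of
# an XYZ block.
import io

def count_xyz_atoms(fileobj):
    # the first line of an XYZ file is the number of atoms
    return int(fileobj.readline().strip())

xyz = "2\nmy xyz file\n H 1.0 1.0 1.0\n H 1.0 2.0 1.0\n"
assert count_xyz_atoms(io.StringIO(xyz)) == 2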
def __init__(self, op, lib, dtype,
             N, C, K,
             H, W, P, Q,
             pad_h, pad_w, filter_extern=None, bprop=False):

    super(XpropWinograd_2x2_3x3, self).__init__(lib, dtype,
          N, C, K, 1, H, W, 1, 3, 3, 1, P, Q,
          0, pad_h, pad_w, 1, 1, 1, 1, 1, 1, bprop)

    SMs = _get_sm_count()

    self.autotune_key = " ".join(native_str(x) for x in (op + "_2x2_3x3",
        SMs, dtype.itemsize, N, C, K, H, W, P, Q))
    # insert Python version in filename to avoid Py2/Py3 incompatibilities in shelve
    self.autotune_db_file = os.path.join(lib.cache_dir, "autotune%d.db" % sys.version_info[0])

    # allow for .5 seconds worth of warmup when autotuning
    # assume 10 Tflops on 24 SMs
    self.warmup = min(max(int(5e12 / (P * Q * K * N * C * 9 * 2.0) * (SMs / 24.0)), 1), 1000)

    if filter_extern is None:
        self.init()
    else:
        # allow manual override for unit testing
        self.initialized = True
        self.init(autotune=1, filter_extern=filter_extern)

    lib.set_scratch_size(self.filter_trans.size, self.bsum.size)
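
# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the snippet above: how the versioned
# shelve cache set up in __init__ might be used.  Results are stored under
# `autotune_key`, and the "%d" in "autotune%d.db" keeps Python 2 and Python 3
# processes from opening each other's (incompatible) shelve files.
# `cache_dir`, the key string, and the stored value are hypothetical.
import os
import shelve
import sys

cache_dir = "/tmp"  # assumption: any writable directory
db_file = os.path.join(cache_dir, "autotune%d.db" % sys.version_info[0])
autotune_key = "fprop_2x2_3x3 24 4 32 64 64 56 56 56 56"  # made-up key

with shelve.open(db_file) as db:
    if autotune_key in db:
        best_params = db[autotune_key]   # reuse cached tuning result
    else:
        best_params = (2, 2, True)       # ... would come from autotuning
        db[autotune_key] = best_params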
if "NVPROF_ID" in os.environ:
for frame in tb.extract_stack():
if nrv_re.search(frame[0]):
break
caller = frame[0:2]
file_path, file_name = os.path.split(caller[0])
path1, path2 = os.path.split(file_path)
file_base, ext = os.path.splitext(file_name)
for name in (path2, file_base, ext):
name = name_re.sub("", name)
if name:
names.append(name)
names.append(native_str(caller[1]))
return names
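
# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the snippet above: naming a kernel after
# its caller by walking the Python stack, skipping frames that live inside
# the library itself and keeping the last user frame.  The "mylib" path
# pattern and the returned tag format are hypothetical.
import os
import re
import traceback

LIB_RE = re.compile(r"[/\\]mylib[/\\]")   # assumption: library install path

def caller_name():
    caller = None
    for frame in traceback.extract_stack()[:-1]:   # drop this frame itself
        if LIB_RE.search(frame[0]):                # frame[0] is the file path
            break
        caller = frame
    if caller is None:
        return "unknown"
    file_base = os.path.splitext(os.path.basename(caller[0]))[0]
    return "%s_%d" % (file_base, caller[1])        # e.g. "train_42"

print(caller_name())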
except IOError:
    raise SacIOError("No such file: " + fname)
data = [_i.rstrip(b"\n\r") for _i in data.splitlines(True)]
if len(data) < 14 + 8 + 8:
    raise SacIOError("%s is not a valid SAC file:" % fname)
# --------------------------------------------------------------
# parse the header
#
# The sac header has 70 floats, 40 integers, then 192 bytes
# in strings. Store them in array (and convert the char to a
# list). That's a total of 632 bytes.
# --------------------------------------------------------------
# read in the float values (14 lines of 5 columns = 70 floats)
self.hf = np.array([i.split() for i in data[:14]],
                   dtype=native_str('<f4')).ravel()
if name == "resource_id" and value is not None:
self.resource_id.setReferredObject(self)
class AbstractEventTypeWithResourceID(AbstractEventType):
def __init__(self, force_resource_id=True, *args, **kwargs):
kwargs["force_resource_id"] = force_resource_id
super(AbstractEventTypeWithResourceID, self).__init__(*args,
**kwargs)
if "resource_id" in [item[0] for item in class_attributes]:
base_class = AbstractEventTypeWithResourceID
else:
base_class = AbstractEventType
# Set the class type name.
setattr(base_class, "__name__", native_str(class_name))
return base_class
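
# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the factory above: the factory picks a
# base class and renames it so generated event types show a meaningful class
# name.  A common alternative, sketched here, is to build a fresh subclass
# with `type()` instead of renaming the base in place.  `make_event_type`
# and its arguments are hypothetical; on Python 3, plain `str` replaces
# `native_str` for the class name.
class AbstractEventType(object):
    pass

def make_event_type(class_name, with_resource_id):
    base = AbstractEventType  # a real factory would pick between two bases
    # type(name, bases, namespace) creates a new class with the right name
    # without mutating the shared base class.
    return type(str(class_name), (base,), {"has_resource_id": with_resource_id})

Pick = make_event_type("Pick", with_resource_id=True)
assert Pick.__name__ == "Pick" and issubclass(Pick, AbstractEventType)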
    self.hf = frombuffer(f.read(4 * 70), dtype=native_str('>f4'))
    self.hi = frombuffer(f.read(4 * 40), dtype=native_str('>i4'))
    # read in the char values
    self.hs = frombuffer(f.read(24 * 8), dtype=native_str('|S8'))
    self.IsSACfile(fname)
    self.byteorder = 'big'
except SacError as e:
    self.hf = self.hi = self.hs = None
    f.close()
    raise SacError(e)
try:
    self._get_date()
except SacError:
    warnings.warn('Cannot determine date')
if self.GetHvalue('lcalda'):
    try:
        self._get_dist()
    except SacError:
        pass
def _to_array(self):
    """
    Store all attributes for serialization in a structured array.
    """
    dtypes = [(native_str('debug'), np.bool_),
              (native_str('bot_depth'), np.float_),
              (native_str('dist'), np.float_, self.dist.shape),
              (native_str('is_p_wave'), np.bool_),
              (native_str('max_ray_param'), np.float_),
              (native_str('min_ray_param'), np.float_),
              (native_str('min_turn_ray_param'), np.float_),
              (native_str('tau'), np.float_, self.tau.shape),
              (native_str('time'), np.float_, self.time.shape),
              (native_str('top_depth'), np.float_)]
    arr = np.empty(shape=(), dtype=dtypes)
    for dtype in dtypes:
        key = dtype[0]
        arr[key] = getattr(self, key)
    return arr
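
# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the method above: round-tripping
# attributes through a 0-d structured array, as _to_array does.  Field names
# become dtype fields, array-valued attributes keep their shape, and reading
# the fields back recovers the original values.  The `Segment` class and its
# attributes are hypothetical.
import numpy as np

class Segment(object):
    def __init__(self):
        self.debug = False
        self.top_depth = 0.0
        self.bot_depth = 35.0
        self.dist = np.array([0.1, 0.2, 0.3])

seg = Segment()
dtypes = [('debug', np.bool_),
          ('top_depth', np.float64),
          ('bot_depth', np.float64),
          ('dist', np.float64, seg.dist.shape)]
arr = np.empty(shape=(), dtype=dtypes)   # 0-d record holding every field
for name, *_ in dtypes:
    arr[name] = getattr(seg, name)

assert arr['bot_depth'] == 35.0
assert np.allclose(arr['dist'], seg.dist)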
    # If successful, y should contain an integer!
"""
# Create a partially bound method that's closed over the variables we want to store.
def method(self, *arguments, **kwargs):
    encoding = kwargs.pop('encoding', None)
    timeout = kwargs.pop('timeout', 1000)
    max_response_length = kwargs.pop('max_response_length', 4096)
    return self.execute_command(verb_number, in_format, out_format, name=name, class_name=class_name,
                                timeout=timeout, max_response_length=max_response_length, *arguments)

# Apply our known documentation to the given command.
method.__name__ = future_utils.native_str(name)
method.__doc__ = doc

# Generate a method signature object, so the Python documentation will be correct.
# (This only helps on modern Python, but oh well.)
try:
    method.__signature__ = _generate_command_rpc_signature(
        in_format, in_parameter_names, out_format, out_parameter_names)
except AttributeError:
    pass

return method
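
# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the factory above: attaching a real
# `inspect.Signature` to a generated method so that help() and IDEs show the
# RPC parameters instead of (*arguments, **kwargs).  The method name and
# parameter names are hypothetical; only the stdlib `inspect` API is assumed.
import inspect

def make_rpc_method(name, parameter_names, doc):
    def method(self, *arguments, **kwargs):
        # a real implementation would forward to self.execute_command(...)
        return (name, arguments, kwargs)

    method.__name__ = name
    method.__doc__ = doc
    params = [inspect.Parameter('self', inspect.Parameter.POSITIONAL_OR_KEYWORD)]
    params += [inspect.Parameter(p, inspect.Parameter.POSITIONAL_OR_KEYWORD)
               for p in parameter_names]
    method.__signature__ = inspect.Signature(params)
    return method

get_temperature = make_rpc_method('get_temperature', ['sensor_id'], 'Read one sensor.')
assert str(inspect.signature(get_temperature)) == '(self, sensor_id)'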
:param metadata_timecheck: For ``getPAZ`` and ``getCoordinates`` check
    if metadata information is changing from start to end time. Raises
    an Exception if this is the case. This can be deactivated to save
    time.
:rtype: :class:`~obspy.core.stream.Stream`
:return: An ObsPy Stream object.
"""
# NOTHING goes ABOVE this line!
# append all args to kwargs, thus having everything in one dictionary
for key, value in locals().items():
    if key not in ["self", "kwargs"]:
        kwargs[key] = value
# allow time strings in arguments
for time_ in ["starttime", "endtime"]:
    if isinstance(kwargs[time_], (str, native_str)):
        kwargs[time_] = UTCDateTime(kwargs[time_])
trim_start = kwargs['starttime']
trim_end = kwargs['endtime']
# we expand the requested timespan on both ends by two samples in
# order to be able to make use of the nearest_sample option of
# stream.trim(). (see trim() and tickets #95 and #105)
# only possible if a channel is specified otherwise delta = 0
delta = 2 * guessDelta(kwargs['channel'])
kwargs['starttime'] = trim_start - delta
kwargs['endtime'] = trim_end + delta
url = '/seismology/waveform/getWaveform'
data = self.client._fetch(url, **kwargs)
if not data:
    raise Exception("No waveform data available")