import matplotlib
import matplotlib.collections
import matplotlib.gridspec
import matplotlib.pyplot as plt
import numpy as np
import scipy.spatial
import sklearn.neighbors

# complexity_embedding() is part of NeuroKit2's public API; inside the package this would be a
# relative import, but the absolute form keeps these snippets self-contained.
from neurokit2 import complexity_embedding


def _embedding_dimension_ffn_d(signal, dimension, delay=1, R=10.0, A=2.0, metric="euclidean", window=10, maxnum=None):
    """Return the fraction of false nearest neighbors for a single dimension d."""
    # Drop the last `delay` points before embedding in dimension d, so that the reconstruction
    # yields the same number of points in dimension d and in dimension d + 1.
    y1 = complexity_embedding(signal[:-delay], delay=delay, dimension=dimension)
    y2 = complexity_embedding(signal, delay=delay, dimension=dimension + 1)

    # Find near neighbors in dimension d.
    index, dist = _embedding_dimension_neighbors(y1, metric=metric, window=window, maxnum=maxnum)

    # Compute the corresponding neighbor distances in dimension d + 1.
    d = np.asarray([scipy.spatial.distance.chebyshev(i, j) for i, j in zip(y2, y2[index])])

    # Flag potential false neighbors using Kennel et al.'s two tests.
    f1 = np.abs(y2[:, -1] - y2[index, -1]) / dist > R
    f2 = d / np.std(signal) > A
    f3 = f1 | f2

    return np.mean(f1), np.mean(f2), np.mean(f3)
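
# Usage note (illustrative, not part of the original module): the per-dimension fractions returned
# above are typically computed over a range of candidate dimensions, and the first dimension at which
# the combined fraction f3 drops to (near) zero is taken as the minimal embedding dimension. A minimal
# sketch of that sweep; the helper name `_fnn_sweep`, the dimension range and the 0.01 threshold are
# illustrative assumptions, not NeuroKit2's own selection rule.
def _fnn_sweep(signal, dimensions=range(1, 11), delay=1, R=10.0, A=2.0, threshold=0.01):
    f3_values = np.asarray(
        [_embedding_dimension_ffn_d(signal, dimension=d, delay=delay, R=R, A=A)[2] for d in dimensions]
    )
    # First candidate dimension whose fraction of false neighbors falls below the threshold.
    below = np.nonzero(f3_values < threshold)[0]
    optimal = list(dimensions)[below[0]] if below.size > 0 else None
    return optimal, f3_values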

def _embedding_dimension_neighbors(signal, dimension_max=20, delay=1, metric="chebyshev", window=0, maxnum=None, show=False):
    """Find the nearest neighbor of each vector of the (embedded) signal using a KD-tree.

    maxnum : int
        Maximum number of near neighbors to search for each point. In rare cases, this will have to
        be increased (i.e., beyond 2 * window + 3). Defaults to None (optimum).
    show : bool
        Defaults to False.

    Returns
    -------
    index : array
        Array containing indices of near neighbors.
    dist : array
        Array containing near neighbor distances.
    """
    # Sanity checks
    if len(signal.shape) == 1:
        # A one-dimensional signal is embedded first.
        y = complexity_embedding(signal, delay=delay, dimension=dimension_max)
    else:
        # The input is assumed to already be an embedded (time-delayed) matrix.
        y = signal

    if metric == "chebyshev":
        p = np.inf
    elif metric == "cityblock":
        p = 1
    elif metric == "euclidean":
        p = 2
    else:
        raise ValueError('Unknown metric. Should be one of "cityblock", "euclidean", or "chebyshev".')

    tree = scipy.spatial.cKDTree(y)  # pylint: disable=E1102
    n = len(y)

    if not maxnum:
        maxnum = 2 * window + 3  # Smallest value guaranteeing a candidate outside the Theiler window
    else:
        maxnum = max(1, maxnum)
    if maxnum >= n:
        raise ValueError("maxnum is bigger than array length.")

    # Keep, for each point, the first candidate that is temporally separated by more than `window`
    # samples (Theiler window) and lies at a nonzero distance.
    index = np.empty(n, dtype=int)
    dist = np.empty(n)
    for i, x in enumerate(y):
        distances, indices = tree.query(x, k=maxnum + 1, p=p)
        valid = (np.abs(indices - i) > window) & (distances > 0)
        if not np.any(valid):
            raise ValueError("Could not find any near neighbor with a nonzero distance. Increase `maxnum`.")
        index[i], dist[i] = indices[valid][0], distances[valid][0]
    return index, dist
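
# Illustration (not from the original module): the Theiler window rejects neighbor candidates that
# are close in *time*, so the retained neighbor reflects the geometry of the attractor rather than
# the continuity of the trajectory. The toy signal, delay and window below are arbitrary choices.
def _theiler_filter_demo(window=10):
    rng = np.random.default_rng(1)
    toy = np.sin(np.linspace(0, 20 * np.pi, 1000)) + 0.05 * rng.standard_normal(1000)
    embedded = complexity_embedding(toy, delay=8, dimension=3)
    tree = scipy.spatial.cKDTree(embedded)
    distances, indices = tree.query(embedded[100], k=50)
    # Reject the query point itself and any candidate within `window` samples of index 100.
    valid = (np.abs(indices - 100) > window) & (distances > 0)
    return indices[valid][0], distances[valid][0]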

def _embedding_delay_plot(signal, metric_values, tau_sequence, tau=1, metric="Mutual Information", ax0=None, ax1=None, plot="2D"):
    # Prepare the figure (optimization curve on top, attractor below), unless axes were passed in.
    if ax0 is None and ax1 is None:
        fig = plt.figure(constrained_layout=False)
        spec = matplotlib.gridspec.GridSpec(ncols=1, nrows=2, height_ratios=[1, 3])
        ax0 = fig.add_subplot(spec[0])
        if plot == "2D":
            ax1 = fig.add_subplot(spec[1])
        elif plot == "3D":
            ax1 = fig.add_subplot(spec[1], projection="3d")
    else:
        fig = None

    ax0.set_title("Optimization of Delay (tau)")
    ax0.set_xlabel("Time Delay (tau)")
    ax0.set_ylabel(metric)
    ax0.plot(tau_sequence, metric_values, color="#FFC107")
    ax0.axvline(x=tau, color="#E91E63", label="Optimal delay: " + str(tau))
    ax0.legend(loc="upper right")
    ax1.set_title("Attractor")
    ax1.set_xlabel("Signal [i]")
    ax1.set_ylabel("Signal [i-" + str(tau) + "]")

    # Get data points, set axis limits
    embedded = complexity_embedding(signal, delay=tau, dimension=3)
    x = embedded[:, 0]
    y = embedded[:, 1]
    z = embedded[:, 2]
    ax1.set_xlim(x.min(), x.max())
    ax1.set_ylim(x.min(), x.max())

    # Colors, mapped to the third coordinate
    norm = plt.Normalize(z.min(), z.max())
    cmap = plt.get_cmap("plasma")
    colors = cmap(norm(z))

    # Attractor for 2D vs 3D
    if plot == "2D":
        points = np.array([x, y]).T.reshape(-1, 1, 2)
        segments = np.concatenate([points[:-1], points[1:]], axis=1)
        lc = matplotlib.collections.LineCollection(segments, cmap="plasma", norm=norm)
        lc.set_array(z[:-1])
        ax1.add_collection(lc)
    elif plot == "3D":
        # Draw the trajectory segment by segment, colored by the third coordinate.
        for i in range(len(x) - 1):
            ax1.plot(x[i:i + 2], y[i:i + 2], z[i:i + 2], color=colors[i])
        ax1.set_zlabel("Signal [i-" + str(2 * tau) + "]")

    return fig
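
# Usage sketch (illustrative, not part of the original module): the plotting helper above expects the
# optimization curve computed while searching for the delay. The absolute-autocorrelation score below
# is only a stand-in for whichever metric the caller actually optimized (e.g., mutual information);
# the helper name `_delay_plot_demo` and the argmin selection rule are illustrative assumptions.
def _delay_plot_demo():
    toy = np.sin(np.linspace(0, 20 * np.pi, 1000))
    tau_sequence = np.arange(1, 50)
    # Stand-in metric: absolute autocorrelation at each candidate lag.
    metric_values = np.asarray([np.abs(np.corrcoef(toy[:-tau], toy[tau:])[0, 1]) for tau in tau_sequence])
    tau = int(tau_sequence[np.argmin(metric_values)])  # lag at which the signal is most decorrelated
    return _embedding_delay_plot(toy, metric_values, tau_sequence, tau=tau, metric="Autocorrelation", plot="2D")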

def _get_embedded(signal, delay, r, dimension=2, distance="chebyshev", approximate=True, fuzzy=False):
    """Time-delay embedding of the signal, together with the neighbor counts used by the entropy routines.

    Examples
    ----------
    >>> import neurokit2 as nk
    >>>
    >>> signal = nk.signal_simulate(duration=2, frequency=5)
    >>> delay = nk.complexity_delay(signal)
    >>>
    >>> embedded, count = _get_embedded(signal, delay, r=0.2 * np.std(signal, ddof=1), dimension=2, distance='chebyshev', approximate=False)
    """
    # Sanity checks
    if distance not in sklearn.neighbors.KDTree.valid_metrics:
        raise ValueError(
            "NeuroKit error: _get_embedded(): The given metric (%s) is not valid."
            " The valid metric names are: %s" % (distance, sklearn.neighbors.KDTree.valid_metrics)
        )

    # Get embedded
    embedded = complexity_embedding(signal, delay=delay, dimension=dimension)
    if approximate is False:
        embedded = embedded[:-1]  # Removes the last line

    if fuzzy is False:
        # Get neighbors count
        count = _get_count(embedded, r=r, distance=distance)
    else:
        # FuzzyEn: Remove the local baselines of vectors
        embedded -= np.mean(embedded, axis=1, keepdims=True)
        count = _get_count_fuzzy(embedded, r=r, distance=distance, n=1)
    return embedded, count
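
# Illustration (not from the original module): `_get_count` is not shown in this snippet, but the
# neighbor count it produces can be pictured as below, i.e., for each embedded vector, how many
# vectors lie within a Chebyshev distance r. Sample entropy then compares these counts at dimension
# m and m + 1. The helper name `_count_within_tolerance` is an illustrative stand-in, not NeuroKit2's
# actual internal implementation.
def _count_within_tolerance(embedded, r):
    kdtree = sklearn.neighbors.KDTree(embedded, metric="chebyshev")
    # query_radius returns, for each vector, the number of vectors (including itself) within radius r.
    return kdtree.query_radius(embedded, r, count_only=True)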