# NOTE: stray vendor banner ("Secure your code as it's written. Use Snyk Code...")
# commented out — as bare text it is a SyntaxError in a Python module.
def inference(x, y, model, alphabet):
    """Evaluate likelihood ratios for an input sequence and map them onto the alphabet.

    Args:
        x (ndarray[float]): raw input sequence, c x N
        y (ndarray[int]): N x 1 label array
        model (pipeline): trained likelihood model
        alphabet (sequence): symbols the ratios are mapped onto

    Returns:
        ndarray: len(alphabet) array of (negated) likelihood ratios.
            Symbols in `alphabet` that never appear in `y` keep the
            default value 1.
    """
    # Filter and downsample the raw sequence.
    dat = sig_pro(input_seq=x)
    # Exponentiate the model's log outputs to get p(e|l=1) and p(e|l=0).
    scores = np.exp(model.transform(dat))
    # Per-sample likelihood ratio (negated, preserving the original convention).
    scores = -scores[:, 1] / scores[:, 0]
    # Map ratios onto the alphabet; unmatched symbols keep ratio 1.
    lik_r = np.ones(len(alphabet))
    for i, symbol in enumerate(alphabet):
        for j, label in enumerate(y):
            # BUG FIX: original used call syntax alphabet(i) / y(j) instead of
            # indexing, which raises TypeError on a list or ndarray.
            if symbol == label:
                # If a label repeats in y, the last matching sample wins
                # (same as the original loop order).
                lik_r[i] = scores[j]
    return lik_r
import numpy as np
import matplotlib.pyplot as plt
from scipy.io import loadmat
"""
Test of filter on real data
"""
# reading previous filter from .mat file
Data = loadmat('sample_dat.mat')
EEG_Data = np.transpose(Data['x'])
fs = 256 # downsampling factor
k=1 # downsampling factor
# New filter: testing filter on real data
y = sig_pro(EEG_Data, fs = fs, k = 1)
# Old filter: testing filter on real data
filt = loadmat('inputFilterCoef.mat')
groupDelay = filt['frontendFilter']['groupDelay'][0,0][0][0]
filterNum = filt['frontendFilter']['Num'][0,0][0] # Den = 1
# Convolution per channel
temp = np.convolve(EEG_Data[0][:], filterNum)
# Filter off-set compensation
temp = temp[groupDelay:];
# Downsampling
Y = temp[::k]
plt.figure(1)
plt.plot(EEG_Data[0][0:200],'b')
plt.plot(y[0][0:200],'r')
plt.plot(Y[0:200],'g')