# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def setUpClass(cls):
    """Build the two module-level detectors shared by every test."""
    global detector, mtcnn_detector
    # Default (Haar cascade) detector and MTCNN-backed detector, in that order.
    detector, mtcnn_detector = FER(), FER(mtcnn=True)
def setUpClass(cls):
    """Initialise the shared detector fixtures once for the whole class."""
    global detector, mtcnn_detector
    # Haar-cascade-backed detector (library default).
    detector = FER()
    # MTCNN-backed detector for the tests that exercise that code path.
    mtcnn_detector = FER(mtcnn=True)
def test_video(self):
    """End-to-end: analyze a sample clip and post-process with pandas."""
    video = Video("tests/woman2.mp4")
    raw_data = video.analyze(FER(), display=False)
    assert isinstance(raw_data, list)

    # Tabulate the per-frame emotion scores.
    df = video.to_pandas(raw_data)
    assert isinstance(df, pd.DataFrame)
    assert "angry" in df
    assert sum(df.neutral[:5] > 0.5) == 5, f"Expected neutral > 0.5, got {df.neutral[:5]}"

    # Narrow down to the first detected face, then to its emotion columns.
    df = video.get_first_face(df)
    assert isinstance(df, pd.DataFrame)
    df = video.get_emotions(df)
    assert isinstance(df, pd.DataFrame)
def test_detect_faces_invalid_content(self):
    """FER raises InvalidImage when handed content that is not an image."""
    # Reading a non-image file through OpenCV does not yield a valid image.
    not_an_image = cv2.imread("example.py")
    with self.assertRaises(InvalidImage):
        detector.detect_emotions(not_an_image)
def test_video(self):
    """Run the full video pipeline and verify each stage's output type."""
    detector = FER()
    video = Video("tests/woman2.mp4")

    raw_data = video.analyze(detector, display=False)
    assert isinstance(raw_data, list)

    # Convert the raw per-frame dictionaries into a DataFrame.
    df = video.to_pandas(raw_data)
    assert sum(df.neutral[:5] > 0.5) == 5, f"Expected neutral > 0.5, got {df.neutral[:5]}"
    assert isinstance(df, pd.DataFrame)
    assert "angry" in df

    # Each post-processing step must keep returning a DataFrame.
    for step in (video.get_first_face, video.get_emotions):
        df = step(df)
        assert isinstance(df, pd.DataFrame)
# Analyze emotions frame-by-frame in a video file and plot the scores.
# Usage: python script.py [videofile]
import os
import sys

import matplotlib

# On a headless POSIX machine (no $DISPLAY), switch to the non-interactive
# Agg backend before pyplot is imported, so plotting does not crash.
if os.name == 'posix' and "DISPLAY" not in os.environ:
    matplotlib.use("Agg")
import matplotlib.pyplot as plt

from fer import FER
from fer import Video

if __name__ == "__main__":
    try:
        videofile = sys.argv[1]
    except IndexError:
        # No path supplied on the command line; fall back to the sample clip.
        videofile = "test.mp4"

    detector = FER(mtcnn=True)
    video = Video(videofile)

    # Output list of dictionaries, one per analyzed frame.
    raw_data = video.analyze(detector, display=False)

    # Convert to pandas for analysis.
    df = video.to_pandas(raw_data)
    df = video.get_first_face(df)
    df = video.get_emotions(df)

    # Plot emotions
    df.plot()
    plt.show()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Minimal FER example: detect emotions on one image and draw the face box.
import cv2
from fer import FER

detector = FER(mtcnn=True)  # or with mtcnn=False for Haar Cascade Classifier

image = cv2.imread("justin.jpg")
# cv2.imread returns None (no exception) when the file is missing/unreadable.
if image is None:
    raise SystemExit("Could not read 'justin.jpg'")

result = detector.detect_emotions(image)
# Result is an array with all the bounding boxes detected. We know that for 'justin.jpg' there is only one.
if not result:
    raise SystemExit("No faces detected in 'justin.jpg'")

bounding_box = result[0]["box"]
emotions = result[0]["emotions"]

# Draw the detected box (x, y, width, height) in orange BGR, 2 px thick.
cv2.rectangle(
    image,
    (bounding_box[0], bounding_box[1]),
    (bounding_box[0] + bounding_box[2], bounding_box[1] + bounding_box[3]),
    (0, 155, 255),
    2,
)
def detect_emotions(self, img: np.ndarray) -> list:
"""
Detects bounding boxes from the specified image with ranking of emotions.
:param img: image to process (BGR or gray)
:return: list containing all the bounding boxes detected with their emotions.
"""
if img is None or not hasattr(img, "shape"):
raise InvalidImage("Image not valid.")
emotion_labels = self._get_labels()
face_rectangles = self.find_faces(img, bgr=True)
gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
emotions = []
for face_coordinates in face_rectangles:
face_coordinates = self.tosquare(face_coordinates)
x1, x2, y1, y2 = self.__apply_offsets(face_coordinates)
if y1 < 0 or x1 < 0:
gray_img = self.pad(gray_img)
x1 += 40
x2 += 40
# Command-line driver: run FER over a video and plot the emotion time series.
import os
import sys

import matplotlib

# Headless POSIX session: force the non-interactive Agg backend before
# matplotlib.pyplot is imported (the backend cannot change afterwards).
if os.name == 'posix' and "DISPLAY" not in os.environ:
    matplotlib.use("Agg")
import matplotlib.pyplot as plt

from fer import FER
from fer import Video

if __name__ == "__main__":
    # Take the video path from argv; default to the bundled sample clip.
    try:
        videofile = sys.argv[1]
    except IndexError:
        videofile = "test.mp4"

    detector = FER(mtcnn=True)
    video = Video(videofile)

    # Output list of dictionaries
    raw_data = video.analyze(detector, display=False)

    # Convert to pandas for analysis
    df = video.to_pandas(raw_data)
    df = video.get_first_face(df)
    df = video.get_emotions(df)

    # Plot emotions
    df.plot()
    plt.show()
def load_video(filename, outdir='/tmp'):
    """Wrap *filename* in a Video and cache it in the module-global current_video.

    :param filename: path of the video file to load.
    :param outdir: directory for Video output artifacts; defaults to '/tmp',
        preserving the previous hard-coded behavior.
    :return: the newly created Video instance.
    """
    global current_video
    # NOTE(review): /tmp is world-readable on most systems — confirm the
    # output artifacts are not sensitive, or pass a private outdir.
    current_video = Video(
        filename, outdir=outdir, tempfile=to_uploads('temp_outfile.mp4'))
    return current_video