aruco mods
ddetommaso committed Apr 10, 2020
1 parent d53bdc8 commit ffb9307
Showing 8 changed files with 78 additions and 41 deletions.
6 changes: 3 additions & 3 deletions setup.py
@@ -13,11 +13,11 @@

setup(
name='tobiiglasses',
version='1.0',
version='1.1',
description='An open-source Python suite for Tobii Pro Glasses 2',
url='https://github.com/ddetommaso/TobiiGlassesPySuite',
download_url='https://github.com/ddetommaso/TobiiGlassesPySuite/archive/1.0.tar.gz',
install_requires=['tobiiglassesctrl>=2.2.5', 'tornado', 'nose', 'pandas', 'matplotlib', 'opencv-contrib-python', 'sortedcontainers==1.5.10', 'dlib'],
download_url='https://github.com/ddetommaso/TobiiGlassesPySuite/archive/1.1.tar.gz',
install_requires=['tobiiglassesctrl>=2.2.6', 'tornado', 'nose', 'pandas', 'matplotlib', 'opencv-contrib-python', 'sortedcontainers==1.5.10', 'dlib'],
author='Davide De Tommaso',
author_email='[email protected]',
keywords=['eye-tracker','tobii','glasses', 'tobii pro glasses 2', 'tobii glasses', 'eye tracking'],
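
The packaging change bumps the suite to version 1.1 and raises the tobiiglassesctrl requirement to 2.2.6. Below is a small hedged check that an environment satisfies the new constraints, assuming both distributions are installed under these names via pip.

# Minimal environment check against the versions setup.py now declares;
# assumes the distributions are installed under these names.
import pkg_resources

print(pkg_resources.get_distribution('tobiiglasses').version)       # expected: 1.1
print(pkg_resources.get_distribution('tobiiglassesctrl').version)   # expected: >= 2.2.6
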
65 changes: 41 additions & 24 deletions tobiiglasses/aoi/aruco/model.py
@@ -38,9 +38,9 @@ def getCorners(self, opencvMat, auro_dict):
return (corners, ids)

def getDetectedFeatures(self, board, corners, ids):
(pose, rvec, tvec) = board.estimatePose(corners, ids, self.__cameraMatrix__, self.__distCoeffs__)
(valid, rvec, tvec) = board.estimatePose(corners, ids, self.__cameraMatrix__, self.__distCoeffs__)
detected_features_points = None
if pose[0] > 0:
if valid > 0:
imgpts, jac = cv2.projectPoints(board.feature_3dpoints, rvec, tvec, self.__cameraMatrix__, self.__distCoeffs__)
detected_features_points = np.array([[int(imgpts[0][0][0]), int(imgpts[0][0][1])],
[int(imgpts[1][0][0]), int(imgpts[1][0][1])],
@@ -50,19 +50,22 @@ def getDetectedFeatures(self, board, corners, ids):


class AOI_Aruco:
MARKERS_X = 3
MARKERS_Y = 2
ARUCO_DICT = aruco.Dictionary_get(aruco.DICT_4X4_50)
MARKERS_X = 3#7
MARKERS_Y = 2#4
ARUCO_DICT = aruco.Dictionary_get(aruco.DICT_4X4_100)

def __init__(self, aoi_label, aoi_id, aruco_detector, markerLength=0.1, markerSeparation=0.1):
def __init__(self, aoi_label, aoi_id, aruco_detector, markerLength=0.1, markerSeparation=0.1, width=1920, height=1080):
self.aoi_label = aoi_label
self.aoi_id = aoi_id
self.markerLength = markerLength
self.markerSeparation = markerSeparation
self.feature_3dpoints = np.array( [[self.markerLength, 0.0, 0.0],
[2*self.markerLength + 2*self.markerSeparation, 0.0, 0.0],
[self.markerLength, 2*self.markerLength + self.markerSeparation, 0.0],
[2*self.markerLength + 2*self.markerSeparation, 2*self.markerLength + self.markerSeparation, 0.0]])
self.__width__ = width
self.__height__ = height

self.feature_3dpoints = np.array( [[self.markerLength, AOI_Aruco.MARKERS_Y*self.markerLength + (AOI_Aruco.MARKERS_Y-1)*self.markerSeparation, 0.0],
[(AOI_Aruco.MARKERS_X-1)*self.markerLength + (AOI_Aruco.MARKERS_X-1)*self.markerSeparation, AOI_Aruco.MARKERS_Y*self.markerLength + (AOI_Aruco.MARKERS_Y-1)*self.markerSeparation, 0.0],
[self.markerLength, 0.0, 0.0],
[(AOI_Aruco.MARKERS_X-1)*self.markerLength + (AOI_Aruco.MARKERS_X-1)*self.markerSeparation, 0.0, 0.0] ])

self.__aoi__ = aruco.GridBoard_create(markersX=AOI_Aruco.MARKERS_X,
markersY=AOI_Aruco.MARKERS_Y,
@@ -71,14 +74,14 @@ def __init__(self, aoi_label, aoi_id, aruco_detector, markerLength=0.1, markerSe
dictionary=AOI_Aruco.ARUCO_DICT,
firstMarker=aoi_id*AOI_Aruco.MARKERS_X*AOI_Aruco.MARKERS_Y)

corners, ids = aruco_detector.getCorners(self.getCVMat(), AOI_Aruco.ARUCO_DICT)
self.features_2dpoints = aruco_detector.getDetectedFeatures(self, corners, ids)
corners, self.__ids__ = aruco_detector.getCorners(self.getCVMat(), AOI_Aruco.ARUCO_DICT)
self.features_2dpoints = aruco_detector.getDetectedFeatures(self, corners, self.__ids__)

def estimatePose(self, corners, ids, cameraMatrix, distCoeffs):
rvec = np.array( [0.0, 0.0, 0.0] )
tvec = np.array( [0.0, 0.0, 0.0] )
pose = aruco.estimatePoseBoard(corners, ids, self.__aoi__, cameraMatrix, distCoeffs, rvec, tvec)
return (pose, rvec, tvec)
(valid, rvec, tvec) = aruco.estimatePoseBoard(corners, ids, self.__aoi__, cameraMatrix, distCoeffs, rvec, tvec)
return (valid, rvec, tvec)

def exportAOI(self, filepath):
img = self.getCVMat()
@@ -88,10 +91,10 @@ def __init__(self, aoi_label, aoi_id, aruco_detector, markerLength=0.1, markerSe
def getAOI(self):
return self.__aoi__

def getCVMat(self, width=1920, height=1080):
img = np.zeros([height, width, 1],dtype=np.uint8)
def getCVMat(self):
img = np.zeros([self.__height__, self.__width__, 1],dtype=np.uint8)
img.fill(255)
self.__aoi__.draw((width, height), img, 40)
self.__aoi__.draw((self.__width__, self.__height__), img, 40)
img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)
return img

@@ -110,19 +113,33 @@ def __init__(self, cameraMatrix, distCoeffs):
def getDetectedItems(self, opencvMat):
(corners, ids) = self.__aruco_detector__.getCorners(opencvMat, AOI_Aruco.ARUCO_DICT)
AOI_Items = []
if ids is None:
return AOI_Items
for label, board in self.__aoi_boards__.items():
detected_features_points = self.__aruco_detector__.getDetectedFeatures(board, corners, ids)
if detected_features_points is None:
break
else:
AOI_Items.append( AOI_Item(label, detected_features_points, board.getAOIFilename(), board.features_2dpoints) )
filtered_corners = []
filtered_ids = []
for marker_id in ids:
if marker_id in board.__ids__:
filtered_corners.append(corners[ids.tolist().index(marker_id)])
filtered_ids.append(marker_id)
filtered_ids = np.asarray(filtered_ids, dtype=np.int32)
filtered_corners = np.asarray(filtered_corners, dtype=np.float32)
if len(filtered_ids) > 0:
detected_features_points = self.__aruco_detector__.getDetectedFeatures(board, filtered_corners, filtered_ids)
if detected_features_points is None:
break
else:
item = AOI_Item(label, detected_features_points, board.getAOIFilename(), board.features_2dpoints)
AOI_Items.append(item)

return AOI_Items

def createArucoAOI(self, aoi_label, markerLength=0.1, markerSeparation=0.1):
def createArucoAOI(self, aoi_label, markerLength=0.1, markerSeparation=0.1, width=1920, height=1080):
if aoi_label in self.__aoi_boards__.keys():
logging.error('aoi_label is already present. AOI will not be created!')
else:
self.__aoi_boards__[aoi_label] = AOI_Aruco(aoi_label, len(self.__aoi_boards__.keys()), self.__aruco_detector__, markerLength, markerSeparation)
self.__aoi_boards__[aoi_label] = AOI_Aruco(aoi_label, len(self.__aoi_boards__.keys()), self.__aruco_detector__, markerLength, markerSeparation, width, height)
self.__aoi_boards__[aoi_label].exportAOI('./')

def exportArucoAOIs(self, filepath='./'):
for label, board in self.__aoi_boards__.items():
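
The changes here unpack aruco.estimatePoseBoard() into (valid, rvec, tvec), derive the four feature points from the grid dimensions, store each board's marker ids, and filter the detected markers per board in getDetectedItems() before estimating that board's pose. Below is a minimal sketch of the same flow against the legacy cv2.aruco API (pre-4.7 opencv-contrib-python) this module uses; the camera intrinsics, input image, and marker-id range are placeholder assumptions, not values from the repository.

import cv2
import cv2.aruco as aruco
import numpy as np

aruco_dict = aruco.Dictionary_get(aruco.DICT_4X4_100)
board = aruco.GridBoard_create(markersX=3, markersY=2,
                               markerLength=0.1, markerSeparation=0.1,
                               dictionary=aruco_dict, firstMarker=0)

camera_matrix = np.array([[1000.0, 0.0, 960.0],     # placeholder intrinsics for a
                          [0.0, 1000.0, 540.0],     # 1920x1080 scene camera
                          [0.0, 0.0, 1.0]])
dist_coeffs = np.zeros(5)

frame = cv2.imread('scene_frame.png')               # hypothetical scene-camera frame
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
corners, ids, _ = aruco.detectMarkers(gray, aruco_dict)

if ids is not None:
    # Keep only the markers that belong to this board (ids 0..5 for a 3x2 grid
    # with firstMarker=0), mirroring the new filtering in getDetectedItems().
    keep = [i for i, m in enumerate(ids.flatten()) if 0 <= m < 6]
    f_corners = [corners[i] for i in keep]
    f_ids = ids[keep]

    if len(f_ids) > 0:
        rvec, tvec = np.zeros(3), np.zeros(3)
        # valid is the number of markers used for the board pose; 0 means no pose.
        valid, rvec, tvec = aruco.estimatePoseBoard(f_corners, f_ids, board,
                                                    camera_matrix, dist_coeffs,
                                                    rvec, tvec)
        if valid > 0:
            # Project the board-plane feature points into the image, as
            # getDetectedFeatures() does. With the defaults above, the new formula
            # yields (0.1, 0.3), (0.4, 0.3), (0.1, 0.0), (0.4, 0.0) in board metres.
            feature_3dpoints = np.array([[0.1, 0.3, 0.0], [0.4, 0.3, 0.0],
                                         [0.1, 0.0, 0.0], [0.4, 0.0, 0.0]])
            imgpts, _ = cv2.projectPoints(feature_3dpoints, rvec, tvec,
                                          camera_matrix, dist_coeffs)
            feature_2dpoints = imgpts.reshape(-1, 2).astype(int)
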
2 changes: 1 addition & 1 deletion tobiiglasses/aoi/clustering.py
@@ -117,7 +117,7 @@ def fit(self, gaze_events, ts_filter):
color_labels = self.__assignColorLabels__(self.__labels__)
i = 0
for ts in ts_list:
gaze_events.setAOI(ts, color_labels[i], self.__getAOI_Distance__(self.__X__[i][0], self.__X__[i][1], self.__means__[self.__labels__[i]]))
gaze_events.setAOI(ts, self.__X__[i][0], self.__X__[i][1], color_labels[i], self.__getAOI_Distance__(self.__X__[i][0], self.__X__[i][1], self.__means__[self.__labels__[i]]))
i+=1

def saveModel(self, filename):
9 changes: 4 additions & 5 deletions tobiiglasses/aoi/dnn/faces/model.py
@@ -21,9 +21,8 @@
import sys
import tobiiglasses
from collections import OrderedDict
from tobiiglasses.aoi.dnn.model import AOI_DNN_Model
from tobiiglasses.aoi.model import AOI
from tobiiglasses.aoi.model import AOI_Item
#from tobiiglasses.aoi.dnn.model import AOI_DNN_Model
from tobiiglasses.aoi.model import AOI, AOI_Item

FACIAL_LANDMARKS_IDXS = OrderedDict([
("mouth", (48, 67)),
@@ -35,8 +34,8 @@
("jaw", (0, 16))
])

FILENAME_SHAPE_PREDICTOR = os.path.join(tobiiglasses.aoi.dnn.__path__[0], 'shape_predictor_68_face_landmarks.dat')
FILENAME_FACE_LANDMARKS = os.path.join(tobiiglasses.aoi.dnn.__path__[0], 'facial_landmarks_68markup.png')
FILENAME_SHAPE_PREDICTOR = os.path.join(tobiiglasses.aoi.dnn.__path__[0], 'faces', 'shape_predictor_68_face_landmarks.dat')
FILENAME_FACE_LANDMARKS = os.path.join(tobiiglasses.aoi.dnn.__path__[0], 'faces', 'facial_landmarks_68markup.png')
FACE_FEATURES_POINTS = np.array( [[132, 60], [330, 620], [515, 615], [675, 50]]) #ids = 17, 3, 13, 26

class Face:
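
This change points the shape-predictor and landmark-reference files at a faces/ subfolder of the dnn package and drops the unused AOI_DNN_Model import. A hedged sketch of loading the relocated files follows, assuming the .dat and .png assets really live under tobiiglasses/aoi/dnn/faces/ and that dlib is installed (it is listed in setup.py).

import os
import dlib
import tobiiglasses.aoi.dnn

faces_dir = os.path.join(tobiiglasses.aoi.dnn.__path__[0], 'faces')
# 68-point facial landmark predictor used for the face AOIs.
predictor = dlib.shape_predictor(os.path.join(faces_dir,
                                              'shape_predictor_68_face_landmarks.dat'))
detector = dlib.get_frontal_face_detector()
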
4 changes: 3 additions & 1 deletion tobiiglasses/aoi/model.py
@@ -69,7 +69,9 @@ def apply(self, opencvMat, ts, gaze_x, gaze_y, gaze_events):
q = np.linalg.inv(H).dot(p)
q /= q[2]
self.__aoi_hits__[aoi_id][ts] = [q[0], q[1]]
gaze_events.setAOI(ts, aoi_id, item.aoi_score)
gaze_events.setAOI(ts, int(q[0]), int(q[1]), aoi_id, item.aoi_score)
else:
gaze_events.setAOI(ts, 0, 0, aoi_id, 0)

self.showLandmarks(opencvMat, item.landmarks)

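
In apply(), the mapped gaze point q (obtained by applying the inverse homography to the scene-frame gaze point and normalising by q[2]) is now forwarded to the extended setAOI() call, and a zero entry is written when no AOI is hit. Below is a minimal sketch of that back-mapping step, assuming H maps AOI-reference coordinates to scene-image coordinates so its inverse maps a scene gaze point onto the AOI plane; all point values are placeholders.

import cv2
import numpy as np

# Matching 2D points: corners of the AOI reference image and where they were
# detected in the scene frame (e.g. the projected ArUco feature points).
aoi_pts   = np.array([[0, 0], [800, 0], [0, 600], [800, 600]], dtype=np.float32)
scene_pts = np.array([[412, 198], [905, 214], [396, 622], [918, 641]], dtype=np.float32)

H, _ = cv2.findHomography(aoi_pts, scene_pts)    # AOI plane -> scene image

p = np.array([650.0, 400.0, 1.0])                # homogeneous gaze point in the scene frame
q = np.linalg.inv(H).dot(p)
q /= q[2]                                        # (q[0], q[1]): gaze mapped onto the AOI image
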
8 changes: 7 additions & 1 deletion tobiiglasses/events.py
@@ -33,6 +33,8 @@ class GazeEvents:
EventDuration = "Event Duration"
Fixation_X = "Fixation X"
Fixation_Y = "Fixation Y"
AOI_Mapped_Fixation_X = "AOI_Mapped_Fixation_X"
AOI_Mapped_Fixation_Y = "AOI_Mapped_Fixation_Y"
AOI = "AOI"
AOI_Score = "AOI Score"
Saccade_Start_X = "Saccade Start X"
@@ -69,6 +71,8 @@ def __init_datatypes__(self):
self.__events__[GazeEvents.Fixation_Y] = GazeItem(GazeEvents.Fixation_Y, np.dtype('u4'))
self.__events__[GazeEvents.EventIndex] = GazeItem(GazeEvents.EventIndex, np.dtype('u4'))
self.__events__[GazeEvents.EventDuration] = GazeItem(GazeEvents.EventDuration, np.dtype('u4'))
self.__events__[GazeEvents.AOI_Mapped_Fixation_X] = GazeItem(GazeEvents.AOI_Mapped_Fixation_X, np.dtype('u4'))
self.__events__[GazeEvents.AOI_Mapped_Fixation_Y] = GazeItem(GazeEvents.AOI_Mapped_Fixation_Y, np.dtype('u4'))
self.__events__[GazeEvents.AOI] = GazeItem(GazeEvents.AOI, np.dtype(object))
self.__events__[GazeEvents.AOI_Score] = GazeItem(GazeEvents.AOI_Score, np.dtype('f2'))
self.__events__[GazeEvents.Saccade_Start_X] = GazeItem(GazeEvents.Saccade_Start_X, np.dtype('f2'))
@@ -145,7 +149,9 @@ def getFixationsAsNumpy(self, ts_filter):
def getTimestamps(self):
return list(self.__events__[GazeEvents.Timestamp].values())

def setAOI(self, ts, aoi_label, aoi_score):
def setAOI(self, ts, aoi_fixation_x, aoi_fixation_y, aoi_label, aoi_score):
self.__events__[GazeEvents.AOI_Mapped_Fixation_X][ts] = aoi_fixation_x
self.__events__[GazeEvents.AOI_Mapped_Fixation_Y][ts] = aoi_fixation_y
self.__events__[GazeEvents.AOI][ts] = aoi_label
self.__events__[GazeEvents.AOI_Score][ts] = aoi_score

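
Two new columns, AOI_Mapped_Fixation_X and AOI_Mapped_Fixation_Y, are stored as unsigned integers, and setAOI() now takes the mapped fixation coordinates in addition to the label and score. A hedged usage sketch of the extended signature follows; the tag_fixation helper and all values are hypothetical, not part of the library.

def tag_fixation(events, ts, aoi_x, aoi_y, label, score):
    """Write one AOI hit using the extended setAOI() added in this commit.

    `events` is a tobiiglasses.events.GazeEvents instance; aoi_x/aoi_y are the
    fixation position mapped onto the AOI reference image (pixels, stored as 'u4').
    """
    events.setAOI(ts, aoi_x, aoi_y, label, score)

# Hypothetical call with placeholder values:
# tag_fixation(events, 1523456.0, 412, 305, 'board_0', 0.93)
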
2 changes: 2 additions & 0 deletions tobiiglasses/exporter.py
@@ -95,6 +95,8 @@ def __init__(self, filepath, filename, fixations_df):
self.__headers__.append(TG.events.GazeEvents.Fixation_X)
self.__headers__.append(TG.events.GazeEvents.Fixation_Y)
self.__headers__.append(TG.events.GazeEvents.EventDuration)
self.__headers__.append(TG.events.GazeEvents.AOI_Mapped_Fixation_X)
self.__headers__.append(TG.events.GazeEvents.AOI_Mapped_Fixation_Y)
self.__headers__.append(TG.events.GazeEvents.AOI)
self.__headers__.append(TG.events.GazeEvents.AOI_Score)
self.__fixations_df__ = fixations_df
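
The fixation exporter now writes the two mapped-coordinate columns as well. A hedged sketch of consuming the extended CSV with pandas; the filename is a placeholder and the column names follow the GazeEvents constants above.

import pandas as pd

df = pd.read_csv('Fixations_output.csv')    # hypothetical exported fixations file
cols = ['AOI', 'AOI Score', 'AOI_Mapped_Fixation_X', 'AOI_Mapped_Fixation_Y']
print(df[cols].head())
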
23 changes: 17 additions & 6 deletions tobiiglasses/recordings.py
@@ -26,6 +26,7 @@
from tobiiglasses.events import GazeEvents
from tobiiglasses.exporter import RawCSV, ExtendedRawCSV
from tobiiglasses.video import VideoFramesAndMappedGaze, VideoAndGaze
from tobiiglasses.metrics import Fixations_Metrics

import cv2.aruco as aruco

@@ -67,11 +68,11 @@ def __getSegmentIDs__(self, segment_id=None):
def __loadSegmentIDs__(self):
self.__segment_ids__.extend(range(1, self.__recording__.getSegmentsN() + 1))

def exportFull(self, fixation_filter, filepath=None, filename='output.avi', segment_id=1, aoi_models=[]):
def exportFull(self, fixation_filter, filepath=None, csv_filename='output.csv', video_filename='output.avi', segment_id=1, aoi_models=[]):
fixations = self.getFixations(fixation_filter, ts_filter=None, segment_id=segment_id)
if filepath is None:
filepath = "."
logging.info('Exporting video with mapped fixations in file %s in folder %s' % (filename, filepath))
logging.info('Exporting video with mapped fixations in folder %s' % filepath)
data = self.getGazeData(segment_id)
fps = data.getFrameFPS()
width = data.getFrameWidth()
@@ -80,9 +81,18 @@
cap = cv2.VideoCapture(f)
framesAndGaze = iter(VideoFramesAndMappedGaze(data, cap, fps))
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter(filename,fourcc, fps, (width,height))
out = cv2.VideoWriter(video_filename,fourcc, fps, (width,height))


for frame, x, y, ts in framesAndGaze:
if not (x > 0 or y > 0):
continue
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
aruco_dict = aruco.Dictionary_get(aruco.DICT_4X4_50)
parameters = aruco.DetectorParameters_create()
corners, ids, rejectedImgPoints = aruco.detectMarkers(gray, aruco_dict, parameters=parameters)
(fx, fy, duration) = fixations.getClosestFixation(ts)

for model in aoi_models:
model.apply(frame, ts, fx, fy, fixations)
model.drawAOIsBox(frame, ts)
@@ -95,12 +105,13 @@ def exportFull(self, fixation_filter, filepath=None, filename='output.avi', segm
color = (0,255,0)
cv2.putText(frame, aoi_id, (fx, fy), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 0), 2)
cv2.circle(frame,(fx,fy), 30, color , 2)
out.write(frame)
frame_markers = aruco.drawDetectedMarkers(frame.copy(), corners, ids)
out.write(frame_markers)
cap.release()
model.exportHeatmap()
(filepath, filename) = self.__getFileParams__('csv', segment_id, filepath, 'dave', 'Fixations')
(filepath, filename) = self.__getFileParams__('csv', segment_id, filepath, csv_filename, 'Fixations')
fixations.exportCSV(filepath, filename, ts_filter=None)

f_metrics = Fixations_Metrics(fixations)


def exportCSV_ExtendedRawData(self, filepath=None, filename=None, segment_id=None):
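
exportFull() now takes separate csv_filename and video_filename arguments, skips frames without valid gaze, runs ArUco detection on every frame, and writes each frame with the detected markers overlaid. Below is a minimal sketch of that per-frame marker pass, assuming an OpenCV VideoCapture over the scene video; the filenames, frame rate, and resolution are placeholders.

import cv2
import cv2.aruco as aruco

cap = cv2.VideoCapture('fullstream.mp4')                 # hypothetical scene video
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('output.avi', fourcc, 25, (1920, 1080))

aruco_dict = aruco.Dictionary_get(aruco.DICT_4X4_50)     # dictionary used in the loop above
parameters = aruco.DetectorParameters_create()

while True:
    ok, frame = cap.read()
    if not ok:
        break
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    corners, ids, _ = aruco.detectMarkers(gray, aruco_dict, parameters=parameters)
    # Overlay the detected markers on a copy of the frame and write the annotated frame.
    frame_markers = aruco.drawDetectedMarkers(frame.copy(), corners, ids)
    out.write(frame_markers)

cap.release()
out.release()
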
