@inproceedings{22ed7dbae5c14324ac94c0983146a0c6,
title = "Evaluation of Different Radar Placements for Food Intake Monitoring Using Deep Learning",
abstract = "Automated food intake monitoring has drawn significant attention due to its potential applications in the healthcare domain. Numerous approaches, including wrist-worn IMU-based and camera-based methods, have emerged to detect food intake activity passively and objectively. Recently, researchers have explored radar for food intake monitoring because of its contactless and privacy-preserving characteristics. In this study, we deploy a Frequency Modulated Continuous Wave (FMCW) radar in three different positions to investigate the performance of each position in automated eating gesture detection. The three positions are front, side, and overhead. Fifteen participants are recruited to consume three meals each (45 meals, 641 min in total), with the radar deployed in a different position for each meal. A 3D Temporal Convolutional Network (3D-TCN) is used to process the range-Doppler cube (RD Cube) of each dataset. Leave-One-Subject-Out (LOSO) validation shows that placing the radar in the front position yields the best performance, with segmental F1-scores of 0.786 and 0.825 for eating and drinking gestures, respectively.",
keywords = "Deep learning, Eating gesture detection, FMCW radar, Food intake monitoring, Human activity recognition",
author = "Chunzhuo Wang and Sunil Kumar and {De Raedt}, Walter and Guido Camps and Hans Hallez and Bart Vanrumste",
year = "2023",
month = jun,
day = "21",
doi = "10.1109/RadarConf2351548.2023.10149626",
language = "English",
series = "Proceedings of the IEEE Radar Conference",
publisher = "IEEE",
pages = "1--6",
booktitle = "2023 IEEE Radar Conference (RadarConf23), Proceedings",
address = "United States",
note = "2023 IEEE Radar Conference, RadarConf23; Conference date: 01-05-2023 through 05-05-2023",
}