

de Recherche et d’Innovation
en Cybersécurité et Société
Joudeh, I. O.; Cretu, A.-M.; Bouchard, S.; Guimond, S.
Prediction of Continuous Emotional Measures through Physiological and Visual Data Journal Article
In: Sensors, vol. 23, no. 12, 2023, ISSN: 14248220, (Publisher: MDPI).
Abstract | Links | BibTeX | Tags: Affect recognition, Affective state, Arousal, Data-source, Deep learning, Electrocardiography, emotion, Emotion Recognition, Emotions, face recognition, Faces detection, Forecasting, human, Humans, Images processing, Learning systems, Machine learning, Machine-learning, mental disease, Mental Disorders, Physiological data, physiology, Signal-processing, Statistical tests, Video recording, Virtual-reality environment
@article{joudeh_prediction_2023,
  author    = {Joudeh, I. O. and Cretu, A.-M. and Bouchard, S. and Guimond, S.},
  title     = {Prediction of Continuous Emotional Measures through Physiological and Visual Data},
  journal   = {Sensors},
  volume    = {23},
  number    = {12},
  year      = {2023},
  date      = {2023-01-01},
  issn      = {1424-8220},
  doi       = {10.3390/s23125613},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85163943735&doi=10.3390%2fs23125613&partnerID=40&md5=5e970f0d8c5790b85d8d77a9f3f52a2d},
  abstract  = {The affective state of a person can be measured using arousal and valence values. In this article, we contribute to the prediction of arousal and valence values from various data sources. Our goal is to later use such predictive models to adaptively adjust virtual reality (VR) environments and help facilitate cognitive remediation exercises for users with mental health disorders, such as schizophrenia, while avoiding discouragement. Building on our previous work on physiological, electrodermal activity (EDA) and electrocardiogram (ECG) recordings, we propose improving preprocessing and adding novel feature selection and decision fusion processes. We use video recordings as an additional data source for predicting affective states. We implement an innovative solution based on a combination of machine learning models alongside a series of preprocessing steps. We test our approach on RECOLA, a publicly available dataset. The best results are obtained with a concordance correlation coefficient (CCC) of 0.996 for arousal and 0.998 for valence using physiological data. Related work in the literature reported lower CCCs on the same data modality; thus, our approach outperforms the state-of-the-art approaches for RECOLA. Our study underscores the potential of using advanced machine learning techniques with diverse data sources to enhance the personalization of VR environments. © 2023 by the authors.},
  note      = {Publisher: MDPI},
  keywords  = {Affect recognition, Affective state, Arousal, Data-source, Deep learning, Electrocardiography, emotion, Emotion Recognition, Emotions, face recognition, Faces detection, Forecasting, human, Humans, Images processing, Learning systems, Machine learning, Machine-learning, mental disease, Mental Disorders, Physiological data, physiology, Signal-processing, Statistical tests, Video recording, Virtual-reality environment},
  pubstate  = {published},
  tppubtype = {article}
}
Tremblay, L.; Chebbi, B.; Bouchard, S.
The predictive role of body image and anti-fat attitudes on attentional bias toward body area in haptic virtual reality environment Journal Article
In: Virtual Reality, vol. 26, no. 1, pp. 333–342, 2022, ISSN: 13594338, (Publisher: Springer Science and Business Media Deutschland GmbH).
Abstract | Links | BibTeX | Tags: body image, Body parts, Image Enhancement, Upper limbs, Virtual humans, virtual reality, Virtual-reality environment
@article{tremblay_predictive_2022,
  author    = {Tremblay, L. and Chebbi, B. and Bouchard, S.},
  title     = {The predictive role of body image and anti-fat attitudes on attentional bias toward body area in haptic virtual reality environment},
  journal   = {Virtual Reality},
  volume    = {26},
  number    = {1},
  pages     = {333--342},
  year      = {2022},
  date      = {2022-01-01},
  issn      = {1359-4338},
  doi       = {10.1007/s10055-021-00569-4},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85113139887&doi=10.1007%2fs10055-021-00569-4&partnerID=40&md5=3b77f665011b82e40c9ce8d5f438146a},
  abstract  = {Evidence suggests that dissatisfaction with body image in women can be enhanced by exposure to media’s idealized images. The theory of social comparison and the avoidance hypothesis offer contradictory explanations of this relationship. We compare these two theories using a haptic virtual reality environment. We ask 42 female participants to interact with one of four types of virtual humans (VH) randomly assigned to them. The interaction task involves giving a virtual hug to a normal weight or overweight male or female VH. We verify the hypothesis that participants’ satisfaction with particular body parts and their anti-fat attitudes will determine the choice of the body area of the VH they will virtually touch. Our results show that: (1) touching VH lower torso is predicted by less anti-fat attitude, and avoidance of the upper torso and upper limb areas, and (2) touching VH shoulder and upper limbs areas is predicted by concerns with own stomach area and avoidance of VH lower torso and stomach waist areas. Our results tend to support the avoidance hypothesis as well as other research findings on anti-fat attitudes. © 2021, The Author(s), under exclusive licence to Springer-Verlag London Ltd., part of Springer Nature.},
  note      = {Publisher: Springer Science and Business Media Deutschland GmbH},
  keywords  = {body image, Body parts, Image Enhancement, Upper limbs, Virtual humans, virtual reality, Virtual-reality environment},
  pubstate  = {published},
  tppubtype = {article}
}
Tremblay, L.; Bouchard, S.; Chebbi, B.; Wei, L.; Monthuy-Blanc, J.; Boulanger, D.
The development of a haptic virtual reality environment to study body image and affect Journal Article
In: Annual Review of CyberTherapy and Telemedicine, vol. 11, pp. 80–84, 2013, ISSN: 15548716, (Publisher: Virtual reality med institute).
Abstract | Links | BibTeX | Tags: Affect, Arousal, art, article, Biofeedback, body image, computer interface, Emotional communications, female, happiness, Haptic devices, Haptics, human, Human bodies, human experiment, Humans, male, methodology, Mood, motor performance, physiology, psychological aspect, psychology, Psychomotor Performance, Psychophysiology, sadness, Touch, User-Computer Interface, velocity, virtual reality, Virtual-reality environment, Young Adult
@article{tremblay_development_2013,
  author    = {Tremblay, L. and Bouchard, S. and Chebbi, B. and Wei, L. and Monthuy-Blanc, J. and Boulanger, D.},
  title     = {The development of a haptic virtual reality environment to study body image and affect},
  journal   = {Annual Review of CyberTherapy and Telemedicine},
  volume    = {11},
  pages     = {80--84},
  year      = {2013},
  date      = {2013-01-01},
  issn      = {1554-8716},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84894231106&partnerID=40&md5=79731f3a31e9e70fcf3bf8f5db1f7d7c},
  abstract  = {We report the results of a preliminary study testing the effect of participants’ mood rating on visual motor performance using a haptic device to manipulate a cartoonish human body. Our results suggest that moods involving high arousal (e.g. happiness) produce larger movements whereas mood involving low arousal (e.g. sadness) produce slower speed of performance. Our results are used for the development of a new haptic virtual reality application that we briefly present here. This application is intended to create a more interactive and motivational environment to treat body image issues and for emotional communication. © 2013 Interactive Media Institute.},
  note      = {Publisher: Virtual reality med institute},
  keywords  = {Affect, Arousal, art, article, Biofeedback, body image, computer interface, Emotional communications, female, happiness, Haptic devices, Haptics, human, Human bodies, human experiment, Humans, male, methodology, Mood, motor performance, physiology, psychological aspect, psychology, Psychomotor Performance, Psychophysiology, sadness, Touch, User-Computer Interface, velocity, virtual reality, Virtual-reality environment, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}