

de Recherche et d’Innovation
en Cybersécurité et Société
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.; Guimond, S.
Prediction of Emotional States from Partial Facial Features for Virtual Reality Applications Article de journal
Dans: Annual Review of CyberTherapy and Telemedicine, vol. 21, p. 17–21, 2023, ISSN: 15548716, (Publisher: Interactive Media Institute).
Résumé | Liens | BibTeX | Étiquettes: Arousal, article, clinical article, convolutional neural network, correlation coefficient, data base, emotion, facies, female, human, human experiment, Image processing, long short term memory network, male, random forest, residual neural network, root mean squared error, videorecording, virtual reality
@article{joudeh_prediction_2023-1,
  title = {Prediction of Emotional States from Partial Facial Features for Virtual Reality Applications},
  author = {Joudeh, I. O. and Cretu, A.-M. and Bouchard, S. and Guimond, S.},
  url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85182471413&partnerID=40&md5=8190e0dbb5b48ae508515f4029b0a0d1},
  issn = {1554-8716},
  year = {2023},
  date = {2023-01-01},
  journal = {Annual Review of CyberTherapy and Telemedicine},
  volume = {21},
  pages = {17--21},
  abstract = {The availability of virtual reality (VR) in numerous clinical contexts has been made possible by recent technological advancements. One application is using VR for cognitive interventions with individuals who have mental disorders. Predicting the emotional states of users could help to prevent their discouragement during VR interventions. We can monitor the emotional states of individuals using sensors like an external camera, as they engage in various tasks within VR environments. The emotional state of VR users can be measured through arousal and valence, as per the Circumplex model. We used the Remote Collaborative and Affective Interactions (RECOLA) database of emotional behaviours. We processed video frames from 18 RECOLA videos. Due to the headset in VR systems, we detected faces and cropped the images of faces to use the lower half of the face only. We labeled the images with arousal and valence values to reflect various emotions. Convolutional neural networks (CNNs), specifically MobileNet-v2 and ResNets-18, were then used to predict arousal and valence values. MobileNet-v2 outperforms ResNet-18 as well as others from the literature. We achieved a root mean squared error (RMSE), Pearson’s correlation coefficient (PCC), and Concordance correlation coefficient (CCC) of 0.1495, 0.6387, and 0.6081 for arousal, and 0.0996, 0.6453, and 0.6232 for valence. Our work acts as a proof-of-concept for predicting emotional states from arousal and valence values via visual data of users immersed in VR experiences. In the future, predicted emotions could be used to automatically adjust the VR environment for individuals engaged in cognitive interventions. © 2023, Interactive Media Institute. All rights reserved.},
  note = {Publisher: Interactive Media Institute},
  keywords = {Arousal, article, clinical article, convolutional neural network, correlation coefficient, data base, emotion, facies, female, human, human experiment, Image processing, long short term memory network, male, random forest, residual neural network, root mean squared error, videorecording, virtual reality},
  pubstate = {published},
  tppubtype = {article}
}
Charbonneau, I.; Robinson, K.; Blais, C.; Fiset, D.
Implicit race attitudes modulate visual information extraction for trustworthiness judgments Article de journal
Dans: PLoS ONE, vol. 15, no 9 September, 2020, ISSN: 19326203, (Publisher: Public Library of Science).
Résumé | Liens | BibTeX | Étiquettes: adult, African American, African Americans, article, Attitude, Caucasian, decision making, Ethics, European Continental Ancestry Group, extraction, eyelash, Facial Expression, facies, female, human, Humans, Judgment, male, perception, physiology, psychology, Racism, Social Perception, Stereotyping, visual information, wrinkle, Young Adult
@article{charbonneau_implicit_2020,
  title = {Implicit race attitudes modulate visual information extraction for trustworthiness judgments},
  author = {Charbonneau, I. and Robinson, K. and Blais, C. and Fiset, D.},
  url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85091622106&doi=10.1371%2fjournal.pone.0239305&partnerID=40&md5=18ca2332affc9cb41d17afc8c450b0b4},
  doi = {10.1371/journal.pone.0239305},
  issn = {1932-6203},
  year = {2020},
  date = {2020-01-01},
  journal = {PLoS ONE},
  volume = {15},
  number = {9},
  abstract = {Black people are still considered to be one of the most stigmatized groups and have to face multiple prejudices that undermine their well-being. Assumptions and beliefs about other racial groups are quite pervasive and have been shown to impact basic social tasks such as face processing. For example, individuals with high racial prejudice conceptualize other-race faces as less trustworthy and more criminal. However, it is unknown if implicit racial bias could modulate even low-level perceptual mechanisms such as spatial frequency (SF) extraction when judging the level of trustworthiness of other-race faces. The present study showed that although similar facial features are used to judge the trustworthiness of White and Black faces, own-race faces are processed in lower SF (i.e. coarse information such as the contour of the face and blurred shapes as opposed to high SF representing fine-grained information such as eyelashes or fine wrinkles). This pattern was modulated by implicit race biases: higher implicit biases are associated with a significantly higher reliance on low SF with White than with Black faces. Copyright: © 2020 Charbonneau et al. This is an open access article distributed under the terms of the Creative Commons Attribution License, which permits unrestricted use, distribution, and reproduction in any medium, provided the original author and source are credited.},
  note = {Publisher: Public Library of Science},
  keywords = {adult, African American, African Americans, article, Attitude, Caucasian, decision making, Ethics, European Continental Ancestry Group, extraction, eyelash, Facial Expression, facies, female, human, Humans, Judgment, male, perception, physiology, psychology, Racism, Social Perception, Stereotyping, visual information, wrinkle, Young Adult},
  pubstate = {published},
  tppubtype = {article}
}
Blais, C.; Fiset, D.; Furumoto-Deshaies, H.; Kunz, M.; Seuss, D.; Cormier, S.
Facial Features Underlying the Decoding of Pain Expressions Article de journal
Dans: Journal of Pain, vol. 20, no 6, p. 728–738, 2019, ISSN: 15265900 (ISSN), (Publisher: Churchill Livingstone Inc.).
Résumé | Liens | BibTeX | Étiquettes: adult, article, attention, decoding, Eye, Facial Expression, facies, female, human, human experiment, Humans, lip, male, Memory, Nose, Pain, pain assessment, pain dimensions, pain measurement, sensory analysis, wrinkle, Young Adult
@article{blais_facial_2019,
  title = {Facial Features Underlying the Decoding of Pain Expressions},
  author = {Blais, C. and Fiset, D. and Furumoto-Deshaies, H. and Kunz, M. and Seuss, D. and Cormier, S.},
  url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85060707205&doi=10.1016%2fj.jpain.2019.01.002&partnerID=40&md5=2d2dd305de430a7ce8973644f57a4996},
  doi = {10.1016/j.jpain.2019.01.002},
  issn = {1526-5900},
  year = {2019},
  date = {2019-01-01},
  journal = {Journal of Pain},
  volume = {20},
  number = {6},
  pages = {728--738},
  abstract = {Previous research has revealed that the face is a finely tuned medium for pain communication. Studies assessing the decoding of facial expressions of pain have revealed an interesting discrepancy, namely that, despite eyes narrowing being the most frequent facial expression accompanying pain, individuals mostly rely on brow lowering and nose wrinkling/upper lip raising to evaluate pain. The present study verifies if this discrepancy may reflect an interaction between the features coding pain expressions and the features used by observers and stored in their mental representations. Experiment 1 shows that more weight is allocated to the brow lowering and nose wrinkling/upper lip raising, supporting the idea that these features are allocated more importance when mental representations of pain expressions are stored in memory. These 2 features have been associated with negative valence and with the affective dimension of pain, whereas the eyes narrowing feature has been associated more closely with the sensory dimension of pain. However, experiment 2 shows that these 2 features remain more salient than eyes narrowing, even when attention is specifically directed toward the sensory dimension of pain. Together, these results suggest that the features most saliently coded in the mental representation of facial expressions of pain may reflect a bias toward allocating more weight to the affective information encoded in the face. Perspective: This work reveals the relative importance of 3 facial features representing the core of pain expressions during pain decoding. The results show that 2 features are over-represented; this finding may potentially be linked with the estimation biases occurring when clinicians and lay persons evaluate pain based on facial appearance. © 2019 the American Pain Society},
  note = {Publisher: Churchill Livingstone Inc.},
  keywords = {adult, article, attention, decoding, Eye, Facial Expression, facies, female, human, human experiment, Humans, lip, male, Memory, Nose, Pain, pain assessment, pain dimensions, pain measurement, sensory analysis, wrinkle, Young Adult},
  pubstate = {published},
  tppubtype = {article}
}
Royer, J.; Blais, C.; Barnabé-Lortie, V.; Carré, M.; Leclerc, J.; Fiset, D.
Efficient visual information for unfamiliar face matching despite viewpoint variations: It's not in the eyes! Article de journal
Dans: Vision Research, vol. 123, p. 33–40, 2016, ISSN: 00426989 (ISSN), (Publisher: Elsevier Ltd).
Résumé | Liens | BibTeX | Étiquettes: accuracy, adult, article, association, attention, Bubbles, Evoked Potentials, eye fixation, Face, face profile, face recognition, Facial Recognition, facies, female, Fixation, human, human experiment, Humans, Image analysis, Individual differences, male, Ocular, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, Psychophysics, recognition, Recognition (Psychology), regression analysis, task performance, unfamiliar face matching, viewpoint variation, Viewpoint variations, Visual, visual discrimination, visual evoked potential, visual information, visual memory, visual stimulation, visual system parameters, Young Adult
@article{royer_efficient_2016,
  title = {Efficient visual information for unfamiliar face matching despite viewpoint variations: It's not in the eyes!},
  author = {Royer, J. and Blais, C. and Barnabé-Lortie, V. and Carré, M. and Leclerc, J. and Fiset, D.},
  url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84968779426&doi=10.1016%2fj.visres.2016.04.004&partnerID=40&md5=4c63f6eea279f7322c9af23ae9ed22c1},
  doi = {10.1016/j.visres.2016.04.004},
  issn = {0042-6989},
  year = {2016},
  date = {2016-01-01},
  journal = {Vision Research},
  volume = {123},
  pages = {33--40},
  abstract = {Faces are encountered in highly diverse angles in real-world settings. Despite this considerable diversity, most individuals are able to easily recognize familiar faces. The vast majority of studies in the field of face recognition have nonetheless focused almost exclusively on frontal views of faces. Indeed, a number of authors have investigated the diagnostic facial features for the recognition of frontal views of faces previously encoded in this same view. However, the nature of the information useful for identity matching when the encoded face and test face differ in viewing angle remains mostly unexplored. The present study addresses this issue using individual differences and bubbles, a method that pinpoints the facial features effectively used in a visual categorization task. Our results indicate that the use of features located in the center of the face, the lower left portion of the nose area and the center of the mouth, are significantly associated with individual efficiency to generalize a face's identity across different viewpoints. However, as faces become more familiar, the reliance on this area decreases, while the diagnosticity of the eye region increases. This suggests that a certain distinction can be made between the visual mechanisms subtending viewpoint invariance and face recognition in the case of unfamiliar face identification. Our results further support the idea that the eye area may only come into play when the face stimulus is particularly familiar to the observer. © 2016 Elsevier Ltd.},
  note = {Publisher: Elsevier Ltd},
  keywords = {accuracy, adult, article, association, attention, Bubbles, Evoked Potentials, eye fixation, Face, face profile, face recognition, Facial Recognition, facies, female, Fixation, human, human experiment, Humans, Image analysis, Individual differences, male, Ocular, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, Psychophysics, recognition, Recognition (Psychology), regression analysis, task performance, unfamiliar face matching, viewpoint variation, Viewpoint variations, Visual, visual discrimination, visual evoked potential, visual information, visual memory, visual stimulation, visual system parameters, Young Adult},
  pubstate = {published},
  tppubtype = {article}
}