

Centre de Recherche et d'Innovation en Cybersécurité et Société
Bérubé, A.; Pearson, J.; Blais, C.; Forget, H.
Stress and emotion recognition predict the relationship between a history of maltreatment and sensitive parenting behaviors: A moderated-moderation (Journal article)
In: Development and Psychopathology, vol. 37, no. 1, pp. 281–291, 2025, ISSN: 0954-5794 (Publisher: Cambridge University Press).
@article{berube_stress_2025,
title = {Stress and emotion recognition predict the relationship between a history of maltreatment and sensitive parenting behaviors: A moderated-moderation},
author = {A. Bérubé and J. Pearson and C. Blais and H. Forget},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85182715913&doi=10.1017%2fS095457942300158X&partnerID=40&md5=b3a9056662cf94740131bfd6fbe7352e},
doi = {10.1017/S095457942300158X},
issn = {0954-5794},
year = {2025},
date = {2025-01-01},
journal = {Development and Psychopathology},
volume = {37},
number = {1},
pages = {281–291},
abstract = {Our study proposes to examine how stress and emotion recognition interact with a history of maltreatment to influence sensitive parenting behaviors. A sample of 58 mothers and their children aged between 2 and 5 years old were recruited. Parents' history of maltreatment was measured using the Child Trauma Questionnaire. An emotion recognition task was performed. Mothers identified the dominant emotion in morphed facial emotion expressions in children. Mothers and children interacted for 15 minutes. Salivary cortisol levels of mothers were collected before and after the interaction. Maternal sensitive behaviors were coded during the interaction using the Coding Interactive Behavior scheme. Results indicate that the severity of childhood maltreatment is related to less sensitive behaviors for mothers with average to good abilities in emotion recognition and lower to average increases in cortisol levels following an interaction with their children. For mothers with higher cortisol levels, there is no association between a history of maltreatment and sensitive behaviors, indicating that higher stress reactivity could act as a protective factor. Our study highlights the complex interaction between individual characteristics and environmental factors when it comes to parenting. These results argue for targeted interventions that address personal trauma. © 2024 The Author(s).},
note = {Publisher: Cambridge University Press},
keywords = {adult, Adult Survivors of Child Abuse, chemistry, Child, Child Abuse, child abuse survivor, child parent relation, childhood maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, female, human, Humans, Hydrocortisone, male, mental stress, metabolism, mother, mother child relation, Mother-Child Relations, Mothers, Parenting, physiology, Preschool, preschool child, Psychological, psychology, Saliva, sensitivity, Stress, stress reactivity},
pubstate = {published},
tppubtype = {article}
}
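
The "moderated moderation" in the title above is, in regression terms, a three-way interaction. A minimal sketch of that model structure, assuming hypothetical column names (ctq_total, emotion_recognition, cortisol_change) rather than the authors' actual variables or pipeline:

import pandas as pd
import statsmodels.formula.api as smf

# Hypothetical dataset; one row per mother-child dyad.
df = pd.read_csv("parenting_data.csv")

# Sensitive behaviors predicted from maltreatment history (CTQ score),
# with emotion recognition as a moderator and cortisol reactivity
# moderating that moderation (the full three-way interaction).
model = smf.ols(
    "sensitivity ~ ctq_total * emotion_recognition * cortisol_change",
    data=df,
).fit()
print(model.summary())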
Charbonneau, I.; Duncan, J.; Blais, C.; Guérette, J.; Plouffe-Demers, M. -P.; Smith, F.; Fiset, D.
Facial expression categorization predominantly relies on mid-spatial frequencies (Journal article)
In: Vision Research, vol. 231, 2025, ISSN: 0042-6989 (Publisher: Elsevier Ltd).
@article{charbonneau_facial_2025,
title = {Facial expression categorization predominantly relies on mid-spatial frequencies},
author = {I. Charbonneau and J. Duncan and C. Blais and J. Guérette and M. -P. Plouffe-Demers and F. Smith and D. Fiset},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105003427898&doi=10.1016%2fj.visres.2025.108611&partnerID=40&md5=19b14eb2487f220c3e41cbce28fa5287},
doi = {10.1016/j.visres.2025.108611},
issn = {0042-6989},
year = {2025},
date = {2025-01-01},
journal = {Vision Research},
volume = {231},
abstract = {Facial expressions are crucial in human communication. Recent decades have seen growing interest in understanding the role of spatial frequencies (SFs) in emotion perception in others. While some studies have suggested a preferential treatment of low versus high SFs, the optimal SFs for recognizing basic facial expressions remain elusive. This study, conducted on Western participants, addresses this gap using two complementary methods: a data-driven method (Exp. 1) without arbitrary SF cut-offs, and a more naturalistic method (Exp. 2) simulating variations in viewing distance. Results generally showed a preponderant role of low over high SFs, but particularly stress that facial expression categorization mostly relies on mid-range SF content (i.e. ∼6–13 cycles per face), often overlooked in previous studies. Optimal performance was observed at short to medium viewing distances (1.2–2.4 m), declining sharply with increased distance, precisely when mid-range SFs were no longer available. Additionally, our data suggest variations in SF tuning profiles across basic facial expressions and nuanced contributions from low and mid SFs in facial expression processing. Most importantly, it suggests that any method that removes mid-SF content has the downfall of offering an incomplete account of SFs diagnosticity for facial expression recognition. © 2025 The Authors},
note = {Publisher: Elsevier Ltd},
keywords = {adult, article, Bubbles, Classification, controlled study, emotion, Emotions, Facial Expression, facial expressions, Facial Recognition, female, human, Humans, male, physiology, Psychophysics, simulation, Spatial frequencies, Young Adult},
pubstate = {published},
tppubtype = {article}
}
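
Since the abstract above reports tuning in cycles per face and viewing distances in metres, the link between the two may be worth making explicit: the retinal spatial frequency of a fixed band of cycles per face rises as the face's angular size shrinks with distance. A short sketch of that geometry, where the 0.24 m face height is an assumed value, not one taken from the paper:

import math

def cycles_per_degree(cycles_per_face: float, distance_m: float,
                      face_height_m: float = 0.24) -> float:
    """Convert an object-based SF (cycles/face) into a retinal SF (cycles/degree)."""
    angular_size_deg = math.degrees(2 * math.atan(face_height_m / (2 * distance_m)))
    return cycles_per_face / angular_size_deg

# The mid-range band (~6-13 cycles/face) at the reported optimal distances:
for d in (1.2, 2.4):
    print(d, [round(cycles_per_degree(c, d), 2) for c in (6, 13)])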
Larochelle, S.; Dugas, M. J.; Langlois, F.; Gosselin, P.; Belleville, G.; Bouchard, S.
Intolerance of Uncertainty and Emotion Dysregulation as Predictors of Generalized Anxiety Disorder Severity in a Clinical Population (Journal article)
In: Journal of Clinical Medicine, vol. 14, no. 5, 2025, ISSN: 2077-0383 (Publisher: Multidisciplinary Digital Publishing Institute (MDPI)).
@article{larochelle_intolerance_2025,
title = {Intolerance of Uncertainty and Emotion Dysregulation as Predictors of Generalized Anxiety Disorder Severity in a Clinical Population},
author = {S. Larochelle and M. J. Dugas and F. Langlois and P. Gosselin and G. Belleville and S. Bouchard},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-86000587548&doi=10.3390%2fjcm14051502&partnerID=40&md5=ea6c43194a821d0a0623385714231936},
doi = {10.3390/jcm14051502},
issn = {2077-0383},
year = {2025},
date = {2025-01-01},
journal = {Journal of Clinical Medicine},
volume = {14},
number = {5},
abstract = {Background/objectives: Several factors have been shown to play a role in the development and maintenance of generalized anxiety disorder (GAD), including intolerance of uncertainty and emotion dysregulation. Although the individual contribution of both of these factors is well documented, their combined effect has yet to be studied in a clinical population with GAD. The aim of the present study was to examine the relative contribution of intolerance of uncertainty and emotion dysregulation to the prediction of worry and GAD severity in adults with GAD. Methods: The sample consisted of 108 participants diagnosed with GAD. The participants completed measures of worry, GAD severity, depressive symptoms, intolerance of uncertainty, and emotion dysregulation. Results: Multiple regression indicated that both intolerance of uncertainty and emotion dysregulation significantly contributed to both worry and GAD severity, over and above the contribution of depressive symptoms. Of note, the model explained 36% of the variance in GAD severity scores. Conclusions: The present results provide preliminary evidence of complementarity among dominant models of GAD, and point to the potential role of integrative conceptualizations and treatment strategies for GAD. © 2025 by the authors.},
note = {Publisher: Multidisciplinary Digital Publishing Institute (MDPI)},
keywords = {adult, aged, article, Beck Depression Inventory, clinical population, comorbidity, controlled study, Depression, difficulties in emotion regulation scale, disease severity, emotion, emotion dysregulation, emotion regulation, employment status, female, generalized anxiety disorder, human, intolerance of uncertainty, Intolerance of Uncertainty Scale, major clinical study, male, patient worry, penn state worry questionnaire, prediction, predictive model, psychotherapy, Severity of Illness Index, sociodemographics, theoretical model},
pubstate = {published},
tppubtype = {article}
}
Bérubé, A.; Pétrin, R.; Blais, C.
Parental depression moderates the relationship between childhood maltreatment and the recognition of children expressions of emotions (Journal article)
In: Frontiers in Psychiatry, vol. 15, 2024, ISSN: 1664-0640 (Publisher: Frontiers Media SA).
@article{berube_parental_2024,
title = {Parental depression moderates the relationship between childhood maltreatment and the recognition of children expressions of emotions},
author = {A. Bérubé and R. Pétrin and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85196266525&doi=10.3389%2ffpsyt.2024.1374872&partnerID=40&md5=ce03a1c39e709fc0f2c773d4f82f3a10},
doi = {10.3389/fpsyt.2024.1374872},
issn = {1664-0640},
year = {2024},
date = {2024-01-01},
journal = {Frontiers in Psychiatry},
volume = {15},
abstract = {Background: Sensitivity plays a crucial role in parenting as it involves the ability to perceive and respond appropriately to children’s signals. Childhood maltreatment and depression can negatively impact adults’ ability to recognize emotions, but it is unclear which of these factors has a greater impact or how they interact. This knowledge is central to developing efficient, targeted interventions. This paper examines the interaction between parents’ depressive symptoms and childhood maltreatment and its influence on their ability to recognize the five basic emotions (happiness, anger, sadness, fear, and disgust) in children’s faces. Method: The sample consisted of 52 parents. Depressive symptoms were measured by the depression subscale of the Brief Symptom Inventory-18 (BSI-18), and maltreatment history was assessed by the Childhood Trauma Questionnaire (CTQ). Children’s emotional stimuli were morphed images created using The Child Affective Facial Expression (CAFE) database. Results: Our findings indicate that depressive symptoms moderate the relationship between parents’ history of childhood maltreatment and emotion recognition skills. Parents with higher depressive symptoms had lower emotion recognition accuracy when they had not experienced maltreatment. When childhood maltreatment was severe, emotion recognition skills were more consistent across all levels of depression. The relationship between depression and emotion recognition was primarily linked to recognizing sadness in children’s faces. Conclusion: These findings highlight how different experiences can affect parental abilities in emotion recognition and emphasize the need for interventions tailored to individual profiles to improve their effectiveness. Copyright © 2024 Bérubé, Pétrin and Blais.},
note = {Publisher: Frontiers Media SA},
keywords = {adult, anger, article, Beck Depression Inventory, Child, Child Abuse, child parent relation, childhood maltreatment, Childhood Trauma Questionnaire, Depression, disease severity, disgust, educational status, emotion, Emotion Recognition, Facial Expression, female, happiness, human, income, major clinical study, male, parent-child relationship, parental sensitivity, preschool child, questionnaire, recognition, sadness},
pubstate = {published},
tppubtype = {article}
}
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.
Predicting the Arousal and Valence Values of Emotional States Using Learned, Predesigned, and Deep Visual Features † (Journal article)
In: Sensors, vol. 24, no. 13, 2024, ISSN: 1424-8220 (Publisher: Multidisciplinary Digital Publishing Institute (MDPI)).
@article{joudeh_predicting_2024,
title = {Predicting the Arousal and Valence Values of Emotional States Using Learned, Predesigned, and Deep Visual Features †},
author = {I. O. Joudeh and A. -M. Cretu and S. Bouchard},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85198382238&doi=10.3390%2fs24134398&partnerID=40&md5=cefa8b2e2c044d02f99662af350007db},
doi = {10.3390/s24134398},
issn = {1424-8220},
year = {2024},
date = {2024-01-01},
journal = {Sensors},
volume = {24},
number = {13},
abstract = {The cognitive state of a person can be categorized using the circumplex model of emotional states, a continuous model of two dimensions: arousal and valence. The purpose of this research is to select a machine learning model(s) to be integrated into a virtual reality (VR) system that runs cognitive remediation exercises for people with mental health disorders. As such, the prediction of emotional states is essential to customize treatments for those individuals. We exploit the Remote Collaborative and Affective Interactions (RECOLA) database to predict arousal and valence values using machine learning techniques. RECOLA includes audio, video, and physiological recordings of interactions between human participants. To allow learners to focus on the most relevant data, features are extracted from raw data. Such features can be predesigned, learned, or extracted implicitly using deep learners. Our previous work on video recordings focused on predesigned and learned visual features. In this paper, we extend our work onto deep visual features. Our deep visual features are extracted using the MobileNet-v2 convolutional neural network (CNN) that we previously trained on RECOLA’s video frames of full/half faces. As the final purpose of our work is to integrate our solution into a practical VR application using head-mounted displays, we experimented with half faces as a proof of concept. The extracted deep features were then used to predict arousal and valence values via optimizable ensemble regression. We also fused the extracted visual features with the predesigned visual features and predicted arousal and valence values using the combined feature set. In an attempt to enhance our prediction performance, we further fused the predictions of the optimizable ensemble model with the predictions of the MobileNet-v2 model. After decision fusion, we achieved a root mean squared error (RMSE) of 0.1140, a Pearson’s correlation coefficient (PCC) of 0.8000, and a concordance correlation coefficient (CCC) of 0.7868 on arousal predictions. We achieved an RMSE of 0.0790, a PCC of 0.7904, and a CCC of 0.7645 on valence predictions. © 2024 by the authors.},
note = {Publisher: Multidisciplinary Digital Publishing Institute (MDPI)},
keywords = {adult, Affective interaction, Arousal, artificial neural network, Cognitive state, Cognitive/emotional state, Collaborative interaction, computer, Convolutional neural networks, correlation coefficient, Deep learning, emotion, Emotional state, Emotions, female, Forecasting, Helmet mounted displays, human, Humans, Learning algorithms, Learning systems, Long short-term memory, Machine learning, Machine-learning, male, Mean square error, Neural networks, physiology, Regression, Root mean squared errors, Video recording, virtual reality, Visual feature, visual features},
pubstate = {published},
tppubtype = {article}
}
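
For reference, the three error and agreement metrics reported in the abstract above have standard definitions (textbook formulas, not notation taken from the paper), for predictions \hat{y} against targets y:

\[
\mathrm{RMSE} = \sqrt{\frac{1}{n}\sum_{i=1}^{n}(\hat{y}_i - y_i)^2}, \qquad
\mathrm{PCC} = \rho = \frac{\operatorname{cov}(y,\hat{y})}{\sigma_y\,\sigma_{\hat{y}}}, \qquad
\mathrm{CCC} = \frac{2\rho\,\sigma_y\sigma_{\hat{y}}}{\sigma_y^2 + \sigma_{\hat{y}}^2 + (\mu_y - \mu_{\hat{y}})^2}.
\]

Unlike the PCC, the CCC also penalizes differences in mean and scale between predictions and targets, which makes it the stricter of the two agreement measures.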
Lévesque-Lacasse, A.; Desjardins, M. -C.; Fiset, D.; Charbonneau, C.; Cormier, S.; Blais, C.
The Relationship Between the Ability to Infer Another's Pain and the Expectations Regarding the Appearance of Pain Facial Expressions: Investigation of the Role of Visual Perception (Journal article)
In: Journal of Pain, vol. 25, no. 1, pp. 250–264, 2024, ISSN: 1526-5900 (Publisher: Elsevier B.V.).
@article{levesque-lacasse_relationship_2024,
title = {The Relationship Between the Ability to Infer Another's Pain and the Expectations Regarding the Appearance of Pain Facial Expressions: Investigation of the Role of Visual Perception},
author = {A. Lévesque-Lacasse and M. -C. Desjardins and D. Fiset and C. Charbonneau and S. Cormier and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85171357836&doi=10.1016%2fj.jpain.2023.08.007&partnerID=40&md5=dcfeb2e0eb9f13b42122ddfcbc987fc0},
doi = {10.1016/j.jpain.2023.08.007},
issn = {1526-5900},
year = {2024},
date = {2024-01-01},
journal = {Journal of Pain},
volume = {25},
number = {1},
pages = {250–264},
abstract = {Although pain is a commonly experienced and observed affective state, it is frequently misinterpreted, which leads to inadequate caregiving. Studies show the ability at estimating pain in others (estimation bias) and detecting its subtle variations (sensitivity) could emerge from independent mechanisms. While estimation bias is modulated by variables such as empathy level, pain catastrophizing tendency, and overexposure to pain, sensitivity remains unimpacted. The present study verifies if these 2 types of inaccuracies are partly explained by perceptual factors. Using reverse correlation, we measured their association with participants' mental representation of pain, or more simply put, with their expectations of what the face of a person in pain should look like. Experiment 1 shows that both parameters are associated with variations in expectations of this expression. More specifically, the estimation bias is linked with expectations characterized by salient changes in the middle face region, whereas sensitivity is associated with salient changes in the eyebrow region. Experiment 2 reveals that bias and sensitivity yield differences in emotional representations. Expectations of individuals with a lower underestimation tendency are qualitatively rated as expressing more pain and sadness, and those of individuals with a higher level of sensitivity as expressing more pain, anger, and disgust. Together, these results provide evidence for a perceptual contribution in pain inferencing that is independent of other psychosocial variables and its link to observers’ expectations. Perspective: This article reinforces the contribution of perceptual mechanisms in pain assessment. Moreover, strategies aimed to improve the reliability of individuals’ expectations regarding the appearance of facial expressions of pain could potentially be developed, and contribute to decrease inaccuracies found in pain assessment and the confusion between pain and other affective states. © 2023 United States Association for the Study of Pain, Inc.},
note = {Publisher: Elsevier B.V.},
keywords = {anger, article, chronic pain, disgust, emotion, Emotions, Empathy, Estimation bias, expectation, eyebrow, Facial Expression, Facial expressions of pain, human, Humans, mental representation, Mental representations, motivation, Pain, pain assessment, psychology, questionnaire, reliability, reproducibility, Reproducibility of Results, Reverse correlation, sadness, sensitivity, vision, Visual Perception},
pubstate = {published},
tppubtype = {article}
}
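
The "mental representations" measured by reverse correlation in the study above come from a simple averaging logic: random noise is superimposed on a base face, and the noise fields are averaged by the observer's responses to yield a classification image. A purely illustrative sketch with simulated responses, not the authors' stimuli or data:

import numpy as np

rng = np.random.default_rng(0)
n_trials, size = 500, 128
noise = rng.normal(size=(n_trials, size, size))  # noise added to a base face
responses = rng.integers(0, 2, n_trials)         # 1 = "looks more in pain" (simulated)

# Classification image: mean noise on "pain" choices minus mean noise on the rest.
ci = noise[responses == 1].mean(axis=0) - noise[responses == 0].mean(axis=0)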
Pétrin, R.; Bérubé, A.; St-Pierre, É.; Blais, C.
Maternal childhood emotional abuse increases cardiovascular responses to children's emotional facial expressions (Journal article)
In: PLoS ONE, vol. 19, no. 5, 2024, ISSN: 1932-6203 (Publisher: Public Library of Science).
@article{petrin_maternal_2024,
title = {Maternal childhood emotional abuse increases cardiovascular responses to children’s emotional facial expressions},
author = {R. Pétrin and A. Bérubé and É. St-Pierre and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85192637581&doi=10.1371%2fjournal.pone.0302782&partnerID=40&md5=c464b30fe7cc5b7b0baaf865fdf1f6de},
doi = {10.1371/journal.pone.0302782},
issn = {1932-6203},
year = {2024},
date = {2024-01-01},
journal = {PLoS ONE},
volume = {19},
number = {5 May},
abstract = {Parents with a history of childhood maltreatment may be more likely to respond inadequately to their child’s emotional cues, such as crying or screaming, due to previous exposure to prolonged stress. While studies have investigated parents’ physiological reactions to their children’s vocal expressions of emotions, less attention has been given to their responses when perceiving children’s facial expressions of emotions. The present study aimed to determine if viewing facial expressions of emotions in children induces cardiovascular changes in mothers (hypo- or hyper-arousal) and whether these differ as a function of childhood maltreatment. A total of 104 mothers took part in this study. Their experiences of childhood maltreatment were measured using the Childhood Trauma Questionnaire (CTQ). Participants’ electrocardiogram signals were recorded during a task in which they viewed a landscape video (baseline) and images of children’s faces expressing different intensities of emotion. Heart rate variability (HRV) was extracted from the recordings as an indicator of parasympathetic reactivity. Participants presented two profiles: one group of mothers had a decreased HRV when presented with images of children’s facial expressions of emotions, while the other group’s HRV increased. However, HRV change was not significantly different between the two groups. The interaction between HRV groups and the severity of maltreatment experienced was marginal. Results suggested that experiences of childhood emotional abuse were more common in mothers whose HRV increased during the task. Therefore, more severe childhood experiences of emotional abuse could be associated with mothers’ cardiovascular hyperreactivity. Maladaptive cardiovascular responses could have a ripple effect, influencing how mothers react to their children’s facial expressions of emotions. That reaction could affect the quality of their interaction with their child. Providing interventions that help parents regulate their physiological and behavioral responses to stress might be helpful, especially if they have experienced childhood maltreatment. © 2024 Public Library of Science. All rights reserved.},
note = {Publisher: Public Library of Science},
keywords = {adult, alcohol consumption, analysis of variance, article, blood pressure, cardiovascular response, Child, Child Abuse, Childhood, Childhood Trauma Questionnaire, demographics, electrocardiogram, Electrocardiography, emotion, Emotional Abuse, Emotions, Ethnicity, Facial Expression, female, Heart Rate, heart rate variability, human, human experiment, Humans, Likert scale, male, mother, mother child relation, Mother-Child Relations, Mothers, parasympathetic tone, physical activity, physiology, post hoc analysis, psychology, questionnaire, sexual abuse, Surveys and Questionnaires},
pubstate = {published},
tppubtype = {article}
}
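
The abstract above extracts HRV from the ECG as a parasympathetic index without naming the exact measure; RMSSD is one common vagally mediated index, shown here as an illustrative assumption rather than the paper's confirmed method:

import numpy as np

def rmssd(r_peak_times_s: np.ndarray) -> float:
    """Root mean square of successive differences of RR intervals, in ms."""
    rr_ms = np.diff(r_peak_times_s) * 1000.0  # RR intervals in milliseconds
    return float(np.sqrt(np.mean(np.diff(rr_ms) ** 2)))

# Usage with hypothetical R-peak timestamps (seconds):
print(rmssd(np.array([0.00, 0.81, 1.63, 2.42, 3.25, 4.05])))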
Bérubé, A.; Turgeon, J.; Blais, C.; Fiset, D.
Emotion Recognition in Adults With a History of Childhood Maltreatment: A Systematic Review (Journal article)
In: Trauma, Violence, and Abuse, vol. 24, no. 1, pp. 278–294, 2023, ISSN: 1524-8380 (Publisher: SAGE Publications Ltd).
@article{berube_emotion_2023,
title = {Emotion Recognition in Adults With a History of Childhood Maltreatment: A Systematic Review},
author = {A. Bérubé and J. Turgeon and C. Blais and D. Fiset},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85109658115&doi=10.1177%2f15248380211029403&partnerID=40&md5=5654c858d5c0c84bfdd832a4c04dd1d5},
doi = {10.1177/15248380211029403},
issn = {1524-8380},
year = {2023},
date = {2023-01-01},
journal = {Trauma, Violence, and Abuse},
volume = {24},
number = {1},
pages = {278–294},
abstract = {Child maltreatment has many well-documented lasting effects on children. Among its consequences, it affects children’s recognition of emotions. More and more studies are recognizing the lasting effect that a history of maltreatment can have on emotion recognition. A systematic literature review was conducted to better understand this relationship. The Preferred Reporting Items for Systematic Reviews and Meta-Analyses (PRISMA) protocol was used and four databases were searched, MEDLINE/PubMed, PsycINFO, EMBASE, and FRANCIS, using three cross-referenced key words: child abuse, emotion recognition, and adults. The search process identified 23 studies that met the inclusion criteria. The review highlights the wide variety of measures used to assess child maltreatment as well as the different protocols used to measure emotion recognition. The results indicate that adults with a history of childhood maltreatment show a differentiated reaction to happiness, anger, and fear. Happiness is less detected, whereas negative emotions are recognized more rapidly and at a lower intensity compared to adults not exposed to such traumatic events. Emotion recognition is also related to greater brain activation for the maltreated group. However, the results are less consistent for adults who also have a diagnosis of mental health problems. The systematic review found that maltreatment affects the perception of emotions expressed on both adult and child faces. However, more research is needed to better understand how a history of maltreatment is related to adults’ perception of children’s emotions. © The Author(s) 2021.},
note = {Publisher: SAGE Publications Ltd},
keywords = {adult, anger, Child, Child Abuse, childhood maltreatment, cycle of maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, Fear, human, Humans, perception, physiology, psychology, systematic review},
pubstate = {published},
tppubtype = {article}
}
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.; Guimond, S.
Prediction of Continuous Emotional Measures through Physiological and Visual Data † (Journal article)
In: Sensors, vol. 23, no. 12, 2023, ISSN: 1424-8220 (Publisher: MDPI).
@article{joudeh_prediction_2023,
title = {Prediction of Continuous Emotional Measures through Physiological and Visual Data †},
author = {I. O. Joudeh and A. -M. Cretu and S. Bouchard and S. Guimond},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85163943735&doi=10.3390%2fs23125613&partnerID=40&md5=5e970f0d8c5790b85d8d77a9f3f52a2d},
doi = {10.3390/s23125613},
issn = {1424-8220},
year = {2023},
date = {2023-01-01},
journal = {Sensors},
volume = {23},
number = {12},
abstract = {The affective state of a person can be measured using arousal and valence values. In this article, we contribute to the prediction of arousal and valence values from various data sources. Our goal is to later use such predictive models to adaptively adjust virtual reality (VR) environments and help facilitate cognitive remediation exercises for users with mental health disorders, such as schizophrenia, while avoiding discouragement. Building on our previous work on physiological, electrodermal activity (EDA) and electrocardiogram (ECG) recordings, we propose improving preprocessing and adding novel feature selection and decision fusion processes. We use video recordings as an additional data source for predicting affective states. We implement an innovative solution based on a combination of machine learning models alongside a series of preprocessing steps. We test our approach on RECOLA, a publicly available dataset. The best results are obtained with a concordance correlation coefficient (CCC) of 0.996 for arousal and 0.998 for valence using physiological data. Related work in the literature reported lower CCCs on the same data modality; thus, our approach outperforms the state-of-the-art approaches for RECOLA. Our study underscores the potential of using advanced machine learning techniques with diverse data sources to enhance the personalization of VR environments. © 2023 by the authors.},
note = {Publisher: MDPI},
keywords = {Affect recognition, Affective state, Arousal, Data-source, Deep learning, Electrocardiography, emotion, Emotion Recognition, Emotions, face recognition, Faces detection, Forecasting, human, Humans, Images processing, Learning systems, Machine learning, Machine-learning, mental disease, Mental Disorders, Physiological data, physiology, Signal-processing, Statistical tests, Video recording, Virtual-reality environment},
pubstate = {published},
tppubtype = {article}
}
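
Of the processes named in the abstract above, decision fusion is the easiest to make concrete: combine two models' prediction streams and pick the mixing weight on held-out data. A minimal sketch under that interpretation (variable names are hypothetical; the paper's exact fusion scheme is not reproduced here):

import numpy as np

def ccc(y: np.ndarray, y_hat: np.ndarray) -> float:
    """Concordance correlation coefficient, the headline metric in this line of work."""
    rho = np.corrcoef(y, y_hat)[0, 1]
    return (2 * rho * y.std() * y_hat.std()
            / (y.var() + y_hat.var() + (y.mean() - y_hat.mean()) ** 2))

def fuse(pred_a: np.ndarray, pred_b: np.ndarray, y_val: np.ndarray) -> np.ndarray:
    """Convex combination of two prediction streams, weight chosen by validation CCC."""
    weights = np.linspace(0.0, 1.0, 101)
    best_w = max(weights, key=lambda w: ccc(y_val, w * pred_a + (1 - w) * pred_b))
    return best_w * pred_a + (1 - best_w) * pred_b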
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.; Guimond, S.
Prediction of Emotional States from Partial Facial Features for Virtual Reality Applications (Journal article)
In: Annual Review of CyberTherapy and Telemedicine, vol. 21, pp. 17–21, 2023, ISSN: 1554-8716 (Publisher: Interactive Media Institute).
@article{joudeh_prediction_2023-1,
title = {Prediction of Emotional States from Partial Facial Features for Virtual Reality Applications},
author = {I. O. Joudeh and A. -M. Cretu and S. Bouchard and S. Guimond},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85182471413&partnerID=40&md5=8190e0dbb5b48ae508515f4029b0a0d1},
issn = {1554-8716},
year = {2023},
date = {2023-01-01},
journal = {Annual Review of CyberTherapy and Telemedicine},
volume = {21},
pages = {17–21},
abstract = {The availability of virtual reality (VR) in numerous clinical contexts has been made possible by recent technological advancements. One application is using VR for cognitive interventions with individuals who have mental disorders. Predicting the emotional states of users could help to prevent their discouragement during VR interventions. We can monitor the emotional states of individuals using sensors like an external camera, as they engage in various tasks within VR environments. The emotional state of VR users can be measured through arousal and valence, as per the Circumplex model. We used the Remote Collaborative and Affective Interactions (RECOLA) database of emotional behaviours. We processed video frames from 18 RECOLA videos. Due to the headset in VR systems, we detected faces and cropped the images of faces to use the lower half of the face only. We labeled the images with arousal and valence values to reflect various emotions. Convolutional neural networks (CNNs), specifically MobileNet-v2 and ResNets-18, were then used to predict arousal and valence values. MobileNet-v2 outperforms ResNet-18 as well as others from the literature. We achieved a root mean squared error (RMSE), Pearson’s correlation coefficient (PCC), and Concordance correlation coefficient (CCC) of 0.1495, 0.6387, and 0.6081 for arousal, and 0.0996, 0.6453, and 0.6232 for valence. Our work acts as a proof-of-concept for predicting emotional states from arousal and valence values via visual data of users immersed in VR experiences. In the future, predicted emotions could be used to automatically adjust the VR environment for individuals engaged in cognitive interventions. © 2023, Interactive Media Institute. All rights reserved.},
note = {Publisher: Interactive Media Institute},
keywords = {Arousal, article, clinical article, convolutional neural network, correlation coefficient, data base, emotion, facies, female, human, human experiment, Image processing, long short term memory network, male, random forest, residual neural network, root mean squared error, videorecording, virtual reality},
pubstate = {published},
tppubtype = {article}
}
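
The pipeline described in the abstract above (detect a face, keep only the lower half to mimic headset occlusion, regress arousal and valence with MobileNet-v2) can be sketched as follows. The file path, detector choice, and 2-unit regression head are assumptions for illustration, not the authors' exact setup, and the backbone here carries untrained ImageNet weights:

import cv2
import torch
import torch.nn as nn
from torchvision import models, transforms

# Face detection with OpenCV's stock Haar cascade (assumes one face is found).
cascade = cv2.CascadeClassifier(
    cv2.data.haarcascades + "haarcascade_frontalface_default.xml")
frame = cv2.imread("frame.png")  # hypothetical video frame
x, y, w, h = cascade.detectMultiScale(
    cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY))[0]
lower_half = frame[y + h // 2 : y + h, x : x + w]  # headset hides the upper face

# MobileNet-v2 with its classifier swapped for a 2-value regression head.
net = models.mobilenet_v2(weights="IMAGENET1K_V1")
net.classifier[-1] = nn.Linear(net.last_channel, 2)  # -> (arousal, valence)
net.eval()

prep = transforms.Compose([
    transforms.ToTensor(),
    transforms.Resize((224, 224)),
    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
])
with torch.no_grad():
    # [:, :, ::-1] converts OpenCV's BGR to RGB before preprocessing.
    arousal, valence = net(prep(lower_half[:, :, ::-1].copy()).unsqueeze(0))[0]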