

de Recherche et d’Innovation
en Cybersécurité et Société
Bérubé, A.; Pearson, J.; Blais, C.; Forget, H.
Stress and emotion recognition predict the relationship between a history of maltreatment and sensitive parenting behaviors: A moderated-moderation Article de journal
Dans: Development and Psychopathology, vol. 37, no 1, p. 281–291, 2025, ISSN: 09545794 (ISSN), (Publisher: Cambridge University Press).
Résumé | Liens | BibTeX | Étiquettes: adult, Adult Survivors of Child Abuse, chemistry, Child, Child Abuse, child abuse survivor, child parent relation, childhood maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, female, human, Humans, Hydrocortisone, male, mental stress, metabolism, mother, mother child relation, Mother-Child Relations, Mothers, Parenting, physiology, Preschool, preschool child, Psychological, psychology, Saliva, sensitivity, Stress, stress reactivity
@article{berube_stress_2025,
  title     = {Stress and emotion recognition predict the relationship between a history of maltreatment and sensitive parenting behaviors: A moderated-moderation},
  author    = {Bérubé, A. and Pearson, J. and Blais, C. and Forget, H.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85182715913&doi=10.1017%2fS095457942300158X&partnerID=40&md5=b3a9056662cf94740131bfd6fbe7352e},
  doi       = {10.1017/S095457942300158X},
  issn      = {0954-5794},
  year      = {2025},
  journal   = {Development and Psychopathology},
  volume    = {37},
  number    = {1},
  pages     = {281--291},
  publisher = {Cambridge University Press},
  abstract  = {Our study proposes to examine how stress and emotion recognition interact with a history of maltreatment to influence sensitive parenting behaviors. A sample of 58 mothers and their children aged between 2 and 5 years old were recruited. Parents' history of maltreatment was measured using the Child Trauma Questionnaire. An emotion recognition task was performed. Mothers identified the dominant emotion in morphed facial emotion expressions in children. Mothers and children interacted for 15 minutes. Salivary cortisol levels of mothers were collected before and after the interaction. Maternal sensitive behaviors were coded during the interaction using the Coding Interactive Behavior scheme. Results indicate that the severity of childhood maltreatment is related to less sensitive behaviors for mothers with average to good abilities in emotion recognition and lower to average increases in cortisol levels following an interaction with their children. For mothers with higher cortisol levels, there is no association between a history of maltreatment and sensitive behaviors, indicating that higher stress reactivity could act as a protective factor. Our study highlights the complex interaction between individual characteristics and environmental factors when it comes to parenting. These results argue for targeted interventions that address personal trauma. © 2024 The Author(s).},
  keywords  = {adult, Adult Survivors of Child Abuse, chemistry, Child, Child Abuse, child abuse survivor, child parent relation, childhood maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, female, human, Humans, Hydrocortisone, male, mental stress, metabolism, mother, mother child relation, Mother-Child Relations, Mothers, Parenting, physiology, Preschool, preschool child, Psychological, psychology, Saliva, sensitivity, Stress, stress reactivity},
  pubstate  = {published},
  tppubtype = {article}
}
Charbonneau, I.; Duncan, J.; Blais, C.; Guérette, J.; Plouffe-Demers, M. -P.; Smith, F.; Fiset, D.
Facial expression categorization predominantly relies on mid-spatial frequencies Article de journal
Dans: Vision Research, vol. 231, 2025, ISSN: 00426989 (ISSN), (Publisher: Elsevier Ltd).
Résumé | Liens | BibTeX | Étiquettes: adult, article, Bubbles, Classification, controlled study, emotion, Emotions, Facial Expression, facial expressions, Facial Recognition, female, human, Humans, male, physiology, Psychophysics, simulation, Spatial frequencies, Young Adult
@article{charbonneau_facial_2025,
  title     = {Facial expression categorization predominantly relies on mid-spatial frequencies},
  author    = {Charbonneau, I. and Duncan, J. and Blais, C. and Guérette, J. and Plouffe-Demers, M.-P. and Smith, F. and Fiset, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105003427898&doi=10.1016%2fj.visres.2025.108611&partnerID=40&md5=19b14eb2487f220c3e41cbce28fa5287},
  doi       = {10.1016/j.visres.2025.108611},
  issn      = {0042-6989},
  year      = {2025},
  journal   = {Vision Research},
  volume    = {231},
  publisher = {Elsevier Ltd},
  abstract  = {Facial expressions are crucial in human communication. Recent decades have seen growing interest in understanding the role of spatial frequencies (SFs) in emotion perception in others. While some studies have suggested a preferential treatment of low versus high SFs, the optimal SFs for recognizing basic facial expressions remain elusive. This study, conducted on Western participants, addresses this gap using two complementary methods: a data-driven method (Exp. 1) without arbitrary SF cut-offs, and a more naturalistic method (Exp. 2) simulating variations in viewing distance. Results generally showed a preponderant role of low over high SFs, but particularly stress that facial expression categorization mostly relies on mid-range SF content (i.e. ∼6–13 cycles per face), often overlooked in previous studies. Optimal performance was observed at short to medium viewing distances (1.2–2.4 m), declining sharply with increased distance, precisely when mid-range SFs were no longer available. Additionally, our data suggest variations in SF tuning profiles across basic facial expressions and nuanced contributions from low and mid SFs in facial expression processing. Most importantly, it suggests that any method that removes mid-SF content has the downfall of offering an incomplete account of SFs diagnosticity for facial expression recognition. © 2025 The Authors},
  keywords  = {adult, article, Bubbles, Classification, controlled study, emotion, Emotions, Facial Expression, facial expressions, Facial Recognition, female, human, Humans, male, physiology, Psychophysics, simulation, Spatial frequencies, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.
Predicting the Arousal and Valence Values of Emotional States Using Learned, Predesigned, and Deep Visual Features † Article de journal
Dans: Sensors, vol. 24, no 13, 2024, ISSN: 14248220 (ISSN), (Publisher: Multidisciplinary Digital Publishing Institute (MDPI)).
Résumé | Liens | BibTeX | Étiquettes: adult, Affective interaction, Arousal, artificial neural network, Cognitive state, Cognitive/emotional state, Collaborative interaction, computer, Convolutional neural networks, correlation coefficient, Deep learning, emotion, Emotional state, Emotions, female, Forecasting, Helmet mounted displays, human, Humans, Learning algorithms, Learning systems, Long short-term memory, Machine learning, Machine-learning, male, Mean square error, Neural networks, physiology, Regression, Root mean squared errors, Video recording, virtual reality, Visual feature, visual features
@article{joudeh_predicting_2024,
  title     = {Predicting the Arousal and Valence Values of Emotional States Using Learned, Predesigned, and Deep Visual Features},
  author    = {Joudeh, I. O. and Cretu, A.-M. and Bouchard, S.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85198382238&doi=10.3390%2fs24134398&partnerID=40&md5=cefa8b2e2c044d02f99662af350007db},
  doi       = {10.3390/s24134398},
  issn      = {1424-8220},
  year      = {2024},
  journal   = {Sensors},
  volume    = {24},
  number    = {13},
  publisher = {Multidisciplinary Digital Publishing Institute (MDPI)},
  abstract  = {The cognitive state of a person can be categorized using the circumplex model of emotional states, a continuous model of two dimensions: arousal and valence. The purpose of this research is to select a machine learning model(s) to be integrated into a virtual reality (VR) system that runs cognitive remediation exercises for people with mental health disorders. As such, the prediction of emotional states is essential to customize treatments for those individuals. We exploit the Remote Collaborative and Affective Interactions (RECOLA) database to predict arousal and valence values using machine learning techniques. RECOLA includes audio, video, and physiological recordings of interactions between human participants. To allow learners to focus on the most relevant data, features are extracted from raw data. Such features can be predesigned, learned, or extracted implicitly using deep learners. Our previous work on video recordings focused on predesigned and learned visual features. In this paper, we extend our work onto deep visual features. Our deep visual features are extracted using the MobileNet-v2 convolutional neural network (CNN) that we previously trained on RECOLA’s video frames of full/half faces. As the final purpose of our work is to integrate our solution into a practical VR application using head-mounted displays, we experimented with half faces as a proof of concept. The extracted deep features were then used to predict arousal and valence values via optimizable ensemble regression. We also fused the extracted visual features with the predesigned visual features and predicted arousal and valence values using the combined feature set. In an attempt to enhance our prediction performance, we further fused the predictions of the optimizable ensemble model with the predictions of the MobileNet-v2 model. After decision fusion, we achieved a root mean squared error (RMSE) of 0.1140, a Pearson’s correlation coefficient (PCC) of 0.8000, and a concordance correlation coefficient (CCC) of 0.7868 on arousal predictions. We achieved an RMSE of 0.0790, a PCC of 0.7904, and a CCC of 0.7645 on valence predictions. © 2024 by the authors.},
  keywords  = {adult, Affective interaction, Arousal, artificial neural network, Cognitive state, Cognitive/emotional state, Collaborative interaction, computer, Convolutional neural networks, correlation coefficient, Deep learning, emotion, Emotional state, Emotions, female, Forecasting, Helmet mounted displays, human, Humans, Learning algorithms, Learning systems, Long short-term memory, Machine learning, Machine-learning, male, Mean square error, Neural networks, physiology, Regression, Root mean squared errors, Video recording, virtual reality, Visual feature, visual features},
  pubstate  = {published},
  tppubtype = {article}
}
Lévesque-Lacasse, A.; Desjardins, M. -C.; Fiset, D.; Charbonneau, C.; Cormier, S.; Blais, C.
The Relationship Between the Ability to Infer Another's Pain and the Expectations Regarding the Appearance of Pain Facial Expressions: Investigation of the Role of Visual Perception Article de journal
Dans: Journal of Pain, vol. 25, no 1, p. 250–264, 2024, ISSN: 15265900, (Publisher: Elsevier B.V.).
Résumé | Liens | BibTeX | Étiquettes: anger, article, chronic pain, disgust, emotion, Emotions, Empathy, Estimation bias, expectation, eyebrow, Facial Expression, Facial expressions of pain, human, Humans, mental representation, Mental representations, motivation, Pain, pain assessment, psychology, questionnaire, reliability, reproducibility, Reproducibility of Results, Reverse correlation, sadness, sensitivity, vision, Visual Perception
@article{levesque-lacasse_relationship_2024,
  title     = {The Relationship Between the Ability to Infer Another's Pain and the Expectations Regarding the Appearance of Pain Facial Expressions: Investigation of the Role of Visual Perception},
  author    = {Lévesque-Lacasse, A. and Desjardins, M.-C. and Fiset, D. and Charbonneau, C. and Cormier, S. and Blais, C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85171357836&doi=10.1016%2fj.jpain.2023.08.007&partnerID=40&md5=dcfeb2e0eb9f13b42122ddfcbc987fc0},
  doi       = {10.1016/j.jpain.2023.08.007},
  issn      = {1526-5900},
  year      = {2024},
  journal   = {Journal of Pain},
  volume    = {25},
  number    = {1},
  pages     = {250--264},
  publisher = {Elsevier B.V.},
  abstract  = {Although pain is a commonly experienced and observed affective state, it is frequently misinterpreted, which leads to inadequate caregiving. Studies show the ability at estimating pain in others (estimation bias) and detecting its subtle variations (sensitivity) could emerge from independent mechanisms. While estimation bias is modulated by variables such as empathy level, pain catastrophizing tendency, and overexposure to pain, sensitivity remains unimpacted. The present study verifies if these 2 types of inaccuracies are partly explained by perceptual factors. Using reverse correlation, we measured their association with participants' mental representation of pain, or more simply put, with their expectations of what the face of a person in pain should look like. Experiment 1 shows that both parameters are associated with variations in expectations of this expression. More specifically, the estimation bias is linked with expectations characterized by salient changes in the middle face region, whereas sensitivity is associated with salient changes in the eyebrow region. Experiment 2 reveals that bias and sensitivity yield differences in emotional representations. Expectations of individuals with a lower underestimation tendency are qualitatively rated as expressing more pain and sadness, and those of individuals with a higher level of sensitivity as expressing more pain, anger, and disgust. Together, these results provide evidence for a perceptual contribution in pain inferencing that is independent of other psychosocial variables and its link to observers’ expectations. Perspective: This article reinforces the contribution of perceptual mechanisms in pain assessment. Moreover, strategies aimed to improve the reliability of individuals’ expectations regarding the appearance of facial expressions of pain could potentially be developed, and contribute to decrease inaccuracies found in pain assessment and the confusion between pain and other affective states. © 2023 United States Association for the Study of Pain, Inc.},
  keywords  = {anger, article, chronic pain, disgust, emotion, Emotions, Empathy, Estimation bias, expectation, eyebrow, Facial Expression, Facial expressions of pain, human, Humans, mental representation, Mental representations, motivation, Pain, pain assessment, psychology, questionnaire, reliability, reproducibility, Reproducibility of Results, Reverse correlation, sadness, sensitivity, vision, Visual Perception},
  pubstate  = {published},
  tppubtype = {article}
}
Pétrin, R.; Bérubé, A.; St-Pierre, É.; Blais, C.
Maternal childhood emotional abuse increases cardiovascular responses to children’s emotional facial expressions Article de journal
Dans: PLoS ONE, vol. 19, no 5 May, 2024, ISSN: 19326203 (ISSN), (Publisher: Public Library of Science).
Résumé | Liens | BibTeX | Étiquettes: adult, alcohol consumption, analysis of variance, article, blood pressure, cardiovascular response, Child, Child Abuse, Childhood, Childhood Trauma Questionnaire, demographics, electrocardiogram, Electrocardiography, emotion, Emotional Abuse, Emotions, Ethnicity, Facial Expression, female, Heart Rate, heart rate variability, human, human experiment, Humans, Likert scale, male, mother, mother child relation, Mother-Child Relations, Mothers, parasympathetic tone, physical activity, physiology, post hoc analysis, psychology, questionnaire, sexual abuse, Surveys and Questionnaires
@article{petrin_maternal_2024,
  title     = {Maternal childhood emotional abuse increases cardiovascular responses to children’s emotional facial expressions},
  author    = {Pétrin, R. and Bérubé, A. and St-Pierre, É. and Blais, C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85192637581&doi=10.1371%2fjournal.pone.0302782&partnerID=40&md5=c464b30fe7cc5b7b0baaf865fdf1f6de},
  doi       = {10.1371/journal.pone.0302782},
  issn      = {1932-6203},
  year      = {2024},
  journal   = {PLoS ONE},
  volume    = {19},
  number    = {5},
  publisher = {Public Library of Science},
  abstract  = {Parents with a history of childhood maltreatment may be more likely to respond inadequately to their child’s emotional cues, such as crying or screaming, due to previous exposure to prolonged stress. While studies have investigated parents’ physiological reactions to their children’s vocal expressions of emotions, less attention has been given to their responses when perceiving children’s facial expressions of emotions. The present study aimed to determine if viewing facial expressions of emotions in children induces cardiovascular changes in mothers (hypo- or hyper-arousal) and whether these differ as a function of childhood maltreatment. A total of 104 mothers took part in this study. Their experiences of childhood maltreatment were measured using the Childhood Trauma Questionnaire (CTQ). Participants’ electrocardiogram signals were recorded during a task in which they viewed a landscape video (baseline) and images of children’s faces expressing different intensities of emotion. Heart rate variability (HRV) was extracted from the recordings as an indicator of parasympathetic reactivity. Participants presented two profiles: one group of mothers had a decreased HRV when presented with images of children’s facial expressions of emotions, while the other group’s HRV increased. However, HRV change was not significantly different between the two groups. The interaction between HRV groups and the severity of maltreatment experienced was marginal. Results suggested that experiences of childhood emotional abuse were more common in mothers whose HRV increased during the task. Therefore, more severe childhood experiences of emotional abuse could be associated with mothers’ cardiovascular hyperreactivity. Maladaptive cardiovascular responses could have a ripple effect, influencing how mothers react to their children’s facial expressions of emotions. That reaction could affect the quality of their interaction with their child. Providing interventions that help parents regulate their physiological and behavioral responses to stress might be helpful, especially if they have experienced childhood maltreatment. © 2024 Public Library of Science. All rights reserved.},
  keywords  = {adult, alcohol consumption, analysis of variance, article, blood pressure, cardiovascular response, Child, Child Abuse, Childhood, Childhood Trauma Questionnaire, demographics, electrocardiogram, Electrocardiography, emotion, Emotional Abuse, Emotions, Ethnicity, Facial Expression, female, Heart Rate, heart rate variability, human, human experiment, Humans, Likert scale, male, mother, mother child relation, Mother-Child Relations, Mothers, parasympathetic tone, physical activity, physiology, post hoc analysis, psychology, questionnaire, sexual abuse, Surveys and Questionnaires},
  pubstate  = {published},
  tppubtype = {article}
}
Bérubé, A.; Turgeon, J.; Blais, C.; Fiset, D.
Emotion Recognition in Adults With a History of Childhood Maltreatment: A Systematic Review Article de journal
Dans: Trauma, Violence, and Abuse, vol. 24, no 1, p. 278–294, 2023, ISSN: 15248380 (ISSN), (Publisher: SAGE Publications Ltd).
Résumé | Liens | BibTeX | Étiquettes: adult, anger, Child, Child Abuse, childhood maltreatment, cycle of maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, Fear, human, Humans, perception, physiology, psychology, systematic review
@article{berube_emotion_2023,
  title     = {Emotion Recognition in Adults With a History of Childhood Maltreatment: A Systematic Review},
  author    = {Bérubé, A. and Turgeon, J. and Blais, C. and Fiset, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85109658115&doi=10.1177%2f15248380211029403&partnerID=40&md5=5654c858d5c0c84bfdd832a4c04dd1d5},
  doi       = {10.1177/15248380211029403},
  issn      = {1524-8380},
  year      = {2023},
  journal   = {Trauma, Violence, and Abuse},
  volume    = {24},
  number    = {1},
  pages     = {278--294},
  publisher = {SAGE Publications Ltd},
  abstract  = {Child maltreatment has many well-documented lasting effects on children. Among its consequences, it affects children’s recognition of emotions. More and more studies are recognizing the lasting effect that a history of maltreatment can have on emotion recognition. A systematic literature review was conducted to better understand this relationship. The Preferred Reporting Items for Systematic Reviews and Meta-Analyses (PRISMA) protocol was used and four databases were searched, MEDLINE/PubMed, PsycINFO, EMBASE, and FRANCIS, using three cross-referenced key words: child abuse, emotion recognition, and adults. The search process identified 23 studies that met the inclusion criteria. The review highlights the wide variety of measures used to assess child maltreatment as well as the different protocols used to measure emotion recognition. The results indicate that adults with a history of childhood maltreatment show a differentiated reaction to happiness, anger, and fear. Happiness is less detected, whereas negative emotions are recognized more rapidly and at a lower intensity compared to adults not exposed to such traumatic events. Emotion recognition is also related to greater brain activation for the maltreated group. However, the results are less consistent for adults who also have a diagnosis of mental health problems. The systematic review found that maltreatment affects the perception of emotions expressed on both adult and child faces. However, more research is needed to better understand how a history of maltreatment is related to adults’ perception of children’s emotions. © The Author(s) 2021.},
  keywords  = {adult, anger, Child, Child Abuse, childhood maltreatment, cycle of maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, Fear, human, Humans, perception, physiology, psychology, systematic review},
  pubstate  = {published},
  tppubtype = {article}
}
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.; Guimond, S.
Prediction of Continuous Emotional Measures through Physiological and Visual Data † Article de journal
Dans: Sensors, vol. 23, no 12, 2023, ISSN: 14248220, (Publisher: MDPI).
Résumé | Liens | BibTeX | Étiquettes: Affect recognition, Affective state, Arousal, Data-source, Deep learning, Electrocardiography, emotion, Emotion Recognition, Emotions, face recognition, Faces detection, Forecasting, human, Humans, Images processing, Learning systems, Machine learning, Machine-learning, mental disease, Mental Disorders, Physiological data, physiology, Signal-processing, Statistical tests, Video recording, Virtual-reality environment
@article{joudeh_prediction_2023,
  title     = {Prediction of Continuous Emotional Measures through Physiological and Visual Data},
  author    = {Joudeh, I. O. and Cretu, A.-M. and Bouchard, S. and Guimond, S.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85163943735&doi=10.3390%2fs23125613&partnerID=40&md5=5e970f0d8c5790b85d8d77a9f3f52a2d},
  doi       = {10.3390/s23125613},
  issn      = {1424-8220},
  year      = {2023},
  journal   = {Sensors},
  volume    = {23},
  number    = {12},
  publisher = {MDPI},
  abstract  = {The affective state of a person can be measured using arousal and valence values. In this article, we contribute to the prediction of arousal and valence values from various data sources. Our goal is to later use such predictive models to adaptively adjust virtual reality (VR) environments and help facilitate cognitive remediation exercises for users with mental health disorders, such as schizophrenia, while avoiding discouragement. Building on our previous work on physiological, electrodermal activity (EDA) and electrocardiogram (ECG) recordings, we propose improving preprocessing and adding novel feature selection and decision fusion processes. We use video recordings as an additional data source for predicting affective states. We implement an innovative solution based on a combination of machine learning models alongside a series of preprocessing steps. We test our approach on RECOLA, a publicly available dataset. The best results are obtained with a concordance correlation coefficient (CCC) of 0.996 for arousal and 0.998 for valence using physiological data. Related work in the literature reported lower CCCs on the same data modality; thus, our approach outperforms the state-of-the-art approaches for RECOLA. Our study underscores the potential of using advanced machine learning techniques with diverse data sources to enhance the personalization of VR environments. © 2023 by the authors.},
  keywords  = {Affect recognition, Affective state, Arousal, Data-source, Deep learning, Electrocardiography, emotion, Emotion Recognition, Emotions, face recognition, Faces detection, Forecasting, human, Humans, Images processing, Learning systems, Machine learning, Machine-learning, mental disease, Mental Disorders, Physiological data, physiology, Signal-processing, Statistical tests, Video recording, Virtual-reality environment},
  pubstate  = {published},
  tppubtype = {article}
}
Gingras, F.; Fiset, D.; Plouffe-Demers, M. -P.; Deschênes, A.; Cormier, S.; Forget, H.; Blais, C.
Pain in the eye of the beholder: Variations in pain visual representations as a function of face ethnicity and culture Article de journal
Dans: British Journal of Psychology, vol. 114, no 3, p. 621–637, 2023, ISSN: 00071269, (Publisher: John Wiley and Sons Ltd).
Résumé | Liens | BibTeX | Étiquettes: Caucasian, emotion, Emotions, Ethnicity, human, Humans, Pain, psychology, White People
@article{gingras_pain_2023,
  title     = {Pain in the eye of the beholder: Variations in pain visual representations as a function of face ethnicity and culture},
  author    = {Gingras, F. and Fiset, D. and Plouffe-Demers, M.-P. and Deschênes, A. and Cormier, S. and Forget, H. and Blais, C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85149411004&doi=10.1111%2fbjop.12641&partnerID=40&md5=eb36c9f5071b30edaff22935109abcea},
  doi       = {10.1111/bjop.12641},
  issn      = {0007-1269},
  year      = {2023},
  journal   = {British Journal of Psychology},
  volume    = {114},
  number    = {3},
  pages     = {621--637},
  publisher = {John Wiley and Sons Ltd},
  abstract  = {Pain experienced by Black individuals is systematically underestimated, and recent studies have shown that part of this bias is rooted in perceptual factors. We used Reverse Correlation to estimate visual representations of the pain expression in Black and White faces, in participants originating from both Western and African countries. Groups of raters were then asked to evaluate the presence of pain and other emotions in these representations. A second group of White raters then evaluated those same representations placed over a neutral background face (50% White; 50% Black). Image-based analyses show significant effects of culture and face ethnicity, but no interaction between the two factors. Western representations were more likely to be judged as expressing pain than African representations. For both cultural groups, raters also perceived more pain in White face representations than in Black face representations. However, when changing the background stimulus to the neutral background face, this effect of face ethnic profile disappeared. Overall, these results suggest that individuals have different expectations of how pain is expressed by Black and White individuals, and that cultural factors may explain a part of this phenomenon. © 2023 The Authors. British Journal of Psychology published by John Wiley & Sons Ltd on behalf of The British Psychological Society.},
  keywords  = {Caucasian, emotion, Emotions, Ethnicity, human, Humans, Pain, psychology, White People},
  pubstate  = {published},
  tppubtype = {article}
}
Charbonneau, I.; Guérette, J.; Cormier, S.; Blais, C.; Lalonde-Beaudoin, G.; Smith, F. W.; Fiset, D.
The role of spatial frequencies for facial pain categorization Article de journal
Dans: Scientific Reports, vol. 11, no 1, 2021, ISSN: 20452322, (Publisher: Nature Research).
Résumé | Liens | BibTeX | Étiquettes: Adolescent, adult, Classification, Distance Perception, emotion, Emotions, Face, face pain, Facial Expression, Facial Pain, Facial Recognition, female, human, Humans, Knowledge, male, Normal Distribution, Pattern Recognition, procedures, psychology, Psychophysics, recognition, reproducibility, Reproducibility of Results, Visual, Young Adult
@article{charbonneau_role_2021,
  title     = {The role of spatial frequencies for facial pain categorization},
  author    = {Charbonneau, I. and Guérette, J. and Cormier, S. and Blais, C. and Lalonde-Beaudoin, G. and Smith, F. W. and Fiset, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85111138273&doi=10.1038%2fs41598-021-93776-7&partnerID=40&md5=d759d0218de65fce371bb51d7f2593d8},
  doi       = {10.1038/s41598-021-93776-7},
  issn      = {2045-2322},
  year      = {2021},
  journal   = {Scientific Reports},
  volume    = {11},
  number    = {1},
  publisher = {Nature Research},
  abstract  = {Studies on low-level visual information underlying pain categorization have led to inconsistent findings. Some show an advantage for low spatial frequency information (SFs) and others a preponderance of mid SFs. This study aims to clarify this gap in knowledge since these results have different theoretical and practical implications, such as how far away an observer can be in order to categorize pain. This study addresses this question by using two complementary methods: a data-driven method without a priori expectations about the most useful SFs for pain recognition and a more ecological method that simulates the distance of stimuli presentation. We reveal a broad range of important SFs for pain recognition starting from low to relatively high SFs and showed that performance is optimal in a short to medium distance (1.2–4.8 m) but declines significantly when mid SFs are no longer available. This study reconciles previous results that show an advantage of LSFs over HSFs when using arbitrary cutoffs, but above all reveal the prominent role of mid-SFs for pain recognition across two complementary experimental tasks. © 2021, The Author(s).},
  keywords  = {Adolescent, adult, Classification, Distance Perception, emotion, Emotions, Face, face pain, Facial Expression, Facial Pain, Facial Recognition, female, human, Humans, Knowledge, male, Normal Distribution, Pattern Recognition, procedures, psychology, Psychophysics, recognition, reproducibility, Reproducibility of Results, Visual, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Bérubé, A.; Blais, C.; Fournier, A.; Turgeon, J.; Forget, H.; Coutu, S.; Dubeau, D.
Childhood maltreatment moderates the relationship between emotion recognition and maternal sensitive behaviors Article de journal
Dans: Child Abuse and Neglect, vol. 102, 2020, ISSN: 01452134 (ISSN), (Publisher: Elsevier Ltd).
Résumé | Liens | BibTeX | Étiquettes: article, Child, Child Abuse, childhood maltreatment, Childhood Trauma Questionnaire, emotion, Emotion Recognition, Emotions, female, human, human experiment, Humans, male, Maternal Behavior, mother child relation, Mother-Child Relations, photography, physiology, Preschool, preschool child, psychology, Sensitive behaviors
@article{berube_childhood_2020,
  title     = {Childhood maltreatment moderates the relationship between emotion recognition and maternal sensitive behaviors},
  author    = {Bérubé, A. and Blais, C. and Fournier, A. and Turgeon, J. and Forget, H. and Coutu, S. and Dubeau, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85079890346&doi=10.1016%2fj.chiabu.2020.104432&partnerID=40&md5=05add864de22734e614fe7a34d6d6f1a},
  doi       = {10.1016/j.chiabu.2020.104432},
  issn      = {0145-2134},
  year      = {2020},
  journal   = {Child Abuse and Neglect},
  volume    = {102},
  publisher = {Elsevier Ltd},
  abstract  = {Background: Sensitivity is defined as parents ability to perceive, react and respond to children signals. Having a history of childhood maltreatment changes the way adults perceive visual emotions. These perceptual characteristics could have important consequences on how these parents respond to their children. Objective: The current study examines how a history of childhood maltreatment moderates the relationship between maternal emotion recognition in child faces and sensitive behaviors toward their child during free-play and a structured task. Participants and Setting: Participants included 58 mothers and their children aged between 2 and 5 years. Methods: Mothers were exposed to a set of photographs of child faces showing morphed images of the six basic emotional expressions. Mother-child interactions were then coded for sensitive behaviors. Mothers’ history of childhood maltreatment was assessed using the Childhood Trauma Questionnaire. Results: Maltreatment severity was related to poorer abilities in emotion recognition. However, the association between emotion recognition and sensitive behavior was moderate by history of childhood maltreatment. For mothers exposed to a severe form of childhood maltreatment, a better emotion recognition was related to less sensitive behaviors toward the child, both during free-play and the structured task. Conclusion: This relationship is unique to these mothers and is inconsistent with Ainsworth's definition of sensitivity. These results have important implications as they suggest mothers with a history of severe maltreatment would need tailored interventions which take into account their particular reactions to children's emotions. © 2020},
  keywords  = {article, Child, Child Abuse, childhood maltreatment, Childhood Trauma Questionnaire, emotion, Emotion Recognition, Emotions, female, human, human experiment, Humans, male, Maternal Behavior, mother child relation, Mother-Child Relations, photography, physiology, Preschool, preschool child, psychology, Sensitive behaviors},
  pubstate  = {published},
  tppubtype = {article}
}