

de Recherche et d’Innovation
en Cybersécurité et Société
Lévesque-Lacasse, A.; Desjardins, M.-C.; Fiset, D.; Charbonneau, C.; Cormier, S.; Blais, C.
In: Journal of Pain, vol. 25, no. 1, pp. 250–264, 2024, ISSN: 15265900, (Publisher: Elsevier B.V.).
Abstract | Links | BibTeX | Tags: anger, article, chronic pain, disgust, emotion, Emotions, Empathy, Estimation bias, expectation, eyebrow, Facial Expression, Facial expressions of pain, human, Humans, mental representation, Mental representations, motivation, Pain, pain assessment, psychology, questionnaire, reliability, reproducibility, Reproducibility of Results, Reverse correlation, sadness, sensitivity, vision, Visual Perception
@article{levesque-lacasse_relationship_2024,
  title     = {The Relationship Between the Ability to Infer Another's Pain and the Expectations Regarding the Appearance of Pain Facial Expressions: Investigation of the Role of Visual Perception},
  author    = {Lévesque-Lacasse, A. and Desjardins, M.-C. and Fiset, D. and Charbonneau, C. and Cormier, S. and Blais, C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85171357836&doi=10.1016%2fj.jpain.2023.08.007&partnerID=40&md5=dcfeb2e0eb9f13b42122ddfcbc987fc0},
  doi       = {10.1016/j.jpain.2023.08.007},
  issn      = {1526-5900},
  year      = {2024},
  date      = {2024-01-01},
  journal   = {Journal of Pain},
  volume    = {25},
  number    = {1},
  pages     = {250--264},
  abstract  = {Although pain is a commonly experienced and observed affective state, it is frequently misinterpreted, which leads to inadequate caregiving. Studies show the ability at estimating pain in others (estimation bias) and detecting its subtle variations (sensitivity) could emerge from independent mechanisms. While estimation bias is modulated by variables such as empathy level, pain catastrophizing tendency, and overexposure to pain, sensitivity remains unimpacted. The present study verifies if these 2 types of inaccuracies are partly explained by perceptual factors. Using reverse correlation, we measured their association with participants' mental representation of pain, or more simply put, with their expectations of what the face of a person in pain should look like. Experiment 1 shows that both parameters are associated with variations in expectations of this expression. More specifically, the estimation bias is linked with expectations characterized by salient changes in the middle face region, whereas sensitivity is associated with salient changes in the eyebrow region. Experiment 2 reveals that bias and sensitivity yield differences in emotional representations. Expectations of individuals with a lower underestimation tendency are qualitatively rated as expressing more pain and sadness, and those of individuals with a higher level of sensitivity as expressing more pain, anger, and disgust. Together, these results provide evidence for a perceptual contribution in pain inferencing that is independent of other psychosocial variables and its link to observers’ expectations. Perspective: This article reinforces the contribution of perceptual mechanisms in pain assessment. Moreover, strategies aimed to improve the reliability of individuals’ expectations regarding the appearance of facial expressions of pain could potentially be developed, and contribute to decrease inaccuracies found in pain assessment and the confusion between pain and other affective states. © 2023 United States Association for the Study of Pain, Inc.},
  publisher = {Elsevier B.V.},
  keywords  = {anger, article, chronic pain, disgust, emotion, Emotions, Empathy, Estimation bias, expectation, eyebrow, Facial Expression, Facial expressions of pain, human, Humans, mental representation, Mental representations, motivation, Pain, pain assessment, psychology, questionnaire, reliability, reproducibility, Reproducibility of Results, Reverse correlation, sadness, sensitivity, vision, Visual Perception},
  pubstate  = {published},
  tppubtype = {article}
}
Pétrin, R.; Bérubé, A.; St-Pierre, É.; Blais, C.
Maternal childhood emotional abuse increases cardiovascular responses to children’s emotional facial expressions Journal Article
In: PLoS ONE, vol. 19, no. 5 May, 2024, ISSN: 19326203 (ISSN), (Publisher: Public Library of Science).
Abstract | Links | BibTeX | Tags: adult, alcohol consumption, analysis of variance, article, blood pressure, cardiovascular response, Child, Child Abuse, Childhood, Childhood Trauma Questionnaire, demographics, electrocardiogram, Electrocardiography, emotion, Emotional Abuse, Emotions, Ethnicity, Facial Expression, female, Heart Rate, heart rate variability, human, human experiment, Humans, Likert scale, male, mother, mother child relation, Mother-Child Relations, Mothers, parasympathetic tone, physical activity, physiology, post hoc analysis, psychology, questionnaire, sexual abuse, Surveys and Questionnaires
@article{petrin_maternal_2024,
  title     = {Maternal childhood emotional abuse increases cardiovascular responses to children’s emotional facial expressions},
  author    = {Pétrin, R. and Bérubé, A. and St-Pierre, É. and Blais, C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85192637581&doi=10.1371%2fjournal.pone.0302782&partnerID=40&md5=c464b30fe7cc5b7b0baaf865fdf1f6de},
  doi       = {10.1371/journal.pone.0302782},
  issn      = {1932-6203},
  year      = {2024},
  date      = {2024-01-01},
  journal   = {PLoS ONE},
  volume    = {19},
  number    = {5},
  abstract  = {Parents with a history of childhood maltreatment may be more likely to respond inadequately to their child’s emotional cues, such as crying or screaming, due to previous exposure to prolonged stress. While studies have investigated parents’ physiological reactions to their children’s vocal expressions of emotions, less attention has been given to their responses when perceiving children’s facial expressions of emotions. The present study aimed to determine if viewing facial expressions of emotions in children induces cardiovascular changes in mothers (hypo- or hyper-arousal) and whether these differ as a function of childhood maltreatment. A total of 104 mothers took part in this study. Their experiences of childhood maltreatment were measured using the Childhood Trauma Questionnaire (CTQ). Participants’ electrocardiogram signals were recorded during a task in which they viewed a landscape video (baseline) and images of children’s faces expressing different intensities of emotion. Heart rate variability (HRV) was extracted from the recordings as an indicator of parasympathetic reactivity. Participants presented two profiles: one group of mothers had a decreased HRV when presented with images of children’s facial expressions of emotions, while the other group’s HRV increased. However, HRV change was not significantly different between the two groups. The interaction between HRV groups and the severity of maltreatment experienced was marginal. Results suggested that experiences of childhood emotional abuse were more common in mothers whose HRV increased during the task. Therefore, more severe childhood experiences of emotional abuse could be associated with mothers’ cardiovascular hyperreactivity. Maladaptive cardiovascular responses could have a ripple effect, influencing how mothers react to their children’s facial expressions of emotions. That reaction could affect the quality of their interaction with their child. Providing interventions that help parents regulate their physiological and behavioral responses to stress might be helpful, especially if they have experienced childhood maltreatment. © 2024 Public Library of Science. All rights reserved.},
  publisher = {Public Library of Science},
  keywords  = {adult, alcohol consumption, analysis of variance, article, blood pressure, cardiovascular response, Child, Child Abuse, Childhood, Childhood Trauma Questionnaire, demographics, electrocardiogram, Electrocardiography, emotion, Emotional Abuse, Emotions, Ethnicity, Facial Expression, female, Heart Rate, heart rate variability, human, human experiment, Humans, Likert scale, male, mother, mother child relation, Mother-Child Relations, Mothers, parasympathetic tone, physical activity, physiology, post hoc analysis, psychology, questionnaire, sexual abuse, Surveys and Questionnaires},
  pubstate  = {published},
  tppubtype = {article}
}
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.
Predicting the Arousal and Valence Values of Emotional States Using Learned, Predesigned, and Deep Visual Features † Journal Article
In: Sensors, vol. 24, no. 13, 2024, ISSN: 14248220 (ISSN), (Publisher: Multidisciplinary Digital Publishing Institute (MDPI)).
Abstract | Links | BibTeX | Tags: adult, Affective interaction, Arousal, artificial neural network, Cognitive state, Cognitive/emotional state, Collaborative interaction, computer, Convolutional neural networks, correlation coefficient, Deep learning, emotion, Emotional state, Emotions, female, Forecasting, Helmet mounted displays, human, Humans, Learning algorithms, Learning systems, Long short-term memory, Machine learning, Machine-learning, male, Mean square error, Neural networks, physiology, Regression, Root mean squared errors, Video recording, virtual reality, Visual feature, visual features
@article{joudeh_predicting_2024,
  title     = {Predicting the Arousal and Valence Values of Emotional States Using Learned, Predesigned, and Deep Visual Features},
  author    = {Joudeh, I. O. and Cretu, A.-M. and Bouchard, S.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85198382238&doi=10.3390%2fs24134398&partnerID=40&md5=cefa8b2e2c044d02f99662af350007db},
  doi       = {10.3390/s24134398},
  issn      = {1424-8220},
  year      = {2024},
  date      = {2024-01-01},
  journal   = {Sensors},
  volume    = {24},
  number    = {13},
  abstract  = {The cognitive state of a person can be categorized using the circumplex model of emotional states, a continuous model of two dimensions: arousal and valence. The purpose of this research is to select a machine learning model(s) to be integrated into a virtual reality (VR) system that runs cognitive remediation exercises for people with mental health disorders. As such, the prediction of emotional states is essential to customize treatments for those individuals. We exploit the Remote Collaborative and Affective Interactions (RECOLA) database to predict arousal and valence values using machine learning techniques. RECOLA includes audio, video, and physiological recordings of interactions between human participants. To allow learners to focus on the most relevant data, features are extracted from raw data. Such features can be predesigned, learned, or extracted implicitly using deep learners. Our previous work on video recordings focused on predesigned and learned visual features. In this paper, we extend our work onto deep visual features. Our deep visual features are extracted using the MobileNet-v2 convolutional neural network (CNN) that we previously trained on RECOLA’s video frames of full/half faces. As the final purpose of our work is to integrate our solution into a practical VR application using head-mounted displays, we experimented with half faces as a proof of concept. The extracted deep features were then used to predict arousal and valence values via optimizable ensemble regression. We also fused the extracted visual features with the predesigned visual features and predicted arousal and valence values using the combined feature set. In an attempt to enhance our prediction performance, we further fused the predictions of the optimizable ensemble model with the predictions of the MobileNet-v2 model. After decision fusion, we achieved a root mean squared error (RMSE) of 0.1140, a Pearson’s correlation coefficient (PCC) of 0.8000, and a concordance correlation coefficient (CCC) of 0.7868 on arousal predictions. We achieved an RMSE of 0.0790, a PCC of 0.7904, and a CCC of 0.7645 on valence predictions. © 2024 by the authors.},
  publisher = {Multidisciplinary Digital Publishing Institute (MDPI)},
  keywords  = {adult, Affective interaction, Arousal, artificial neural network, Cognitive state, Cognitive/emotional state, Collaborative interaction, computer, Convolutional neural networks, correlation coefficient, Deep learning, emotion, Emotional state, Emotions, female, Forecasting, Helmet mounted displays, human, Humans, Learning algorithms, Learning systems, Long short-term memory, Machine learning, Machine-learning, male, Mean square error, Neural networks, physiology, Regression, Root mean squared errors, Video recording, virtual reality, Visual feature, visual features},
  pubstate  = {published},
  tppubtype = {article}
}
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.; Guimond, S.
Prediction of Continuous Emotional Measures through Physiological and Visual Data † Journal Article
In: Sensors, vol. 23, no. 12, 2023, ISSN: 14248220, (Publisher: MDPI).
Abstract | Links | BibTeX | Tags: Affect recognition, Affective state, Arousal, Data-source, Deep learning, Electrocardiography, emotion, Emotion Recognition, Emotions, face recognition, Faces detection, Forecasting, human, Humans, Images processing, Learning systems, Machine learning, Machine-learning, mental disease, Mental Disorders, Physiological data, physiology, Signal-processing, Statistical tests, Video recording, Virtual-reality environment
@article{joudeh_prediction_2023,
  title     = {Prediction of Continuous Emotional Measures through Physiological and Visual Data},
  author    = {Joudeh, I. O. and Cretu, A.-M. and Bouchard, S. and Guimond, S.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85163943735&doi=10.3390%2fs23125613&partnerID=40&md5=5e970f0d8c5790b85d8d77a9f3f52a2d},
  doi       = {10.3390/s23125613},
  issn      = {1424-8220},
  year      = {2023},
  date      = {2023-01-01},
  journal   = {Sensors},
  volume    = {23},
  number    = {12},
  abstract  = {The affective state of a person can be measured using arousal and valence values. In this article, we contribute to the prediction of arousal and valence values from various data sources. Our goal is to later use such predictive models to adaptively adjust virtual reality (VR) environments and help facilitate cognitive remediation exercises for users with mental health disorders, such as schizophrenia, while avoiding discouragement. Building on our previous work on physiological, electrodermal activity (EDA) and electrocardiogram (ECG) recordings, we propose improving preprocessing and adding novel feature selection and decision fusion processes. We use video recordings as an additional data source for predicting affective states. We implement an innovative solution based on a combination of machine learning models alongside a series of preprocessing steps. We test our approach on RECOLA, a publicly available dataset. The best results are obtained with a concordance correlation coefficient (CCC) of 0.996 for arousal and 0.998 for valence using physiological data. Related work in the literature reported lower CCCs on the same data modality; thus, our approach outperforms the state-of-the-art approaches for RECOLA. Our study underscores the potential of using advanced machine learning techniques with diverse data sources to enhance the personalization of VR environments. © 2023 by the authors.},
  publisher = {MDPI},
  keywords  = {Affect recognition, Affective state, Arousal, Data-source, Deep learning, Electrocardiography, emotion, Emotion Recognition, Emotions, face recognition, Faces detection, Forecasting, human, Humans, Images processing, Learning systems, Machine learning, Machine-learning, mental disease, Mental Disorders, Physiological data, physiology, Signal-processing, Statistical tests, Video recording, Virtual-reality environment},
  pubstate  = {published},
  tppubtype = {article}
}
Gingras, F.; Fiset, D.; Plouffe-Demers, M. -P.; Deschênes, A.; Cormier, S.; Forget, H.; Blais, C.
Pain in the eye of the beholder: Variations in pain visual representations as a function of face ethnicity and culture Journal Article
In: British Journal of Psychology, vol. 114, no. 3, pp. 621–637, 2023, ISSN: 00071269, (Publisher: John Wiley and Sons Ltd).
Abstract | Links | BibTeX | Tags: Caucasian, emotion, Emotions, Ethnicity, human, Humans, Pain, psychology, White People
@article{gingras_pain_2023,
  title     = {Pain in the eye of the beholder: Variations in pain visual representations as a function of face ethnicity and culture},
  author    = {Gingras, F. and Fiset, D. and Plouffe-Demers, M.-P. and Deschênes, A. and Cormier, S. and Forget, H. and Blais, C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85149411004&doi=10.1111%2fbjop.12641&partnerID=40&md5=eb36c9f5071b30edaff22935109abcea},
  doi       = {10.1111/bjop.12641},
  issn      = {0007-1269},
  year      = {2023},
  date      = {2023-01-01},
  journal   = {British Journal of Psychology},
  volume    = {114},
  number    = {3},
  pages     = {621--637},
  abstract  = {Pain experienced by Black individuals is systematically underestimated, and recent studies have shown that part of this bias is rooted in perceptual factors. We used Reverse Correlation to estimate visual representations of the pain expression in Black and White faces, in participants originating from both Western and African countries. Groups of raters were then asked to evaluate the presence of pain and other emotions in these representations. A second group of White raters then evaluated those same representations placed over a neutral background face (50% White; 50% Black). Image-based analyses show significant effects of culture and face ethnicity, but no interaction between the two factors. Western representations were more likely to be judged as expressing pain than African representations. For both cultural groups, raters also perceived more pain in White face representations than in Black face representations. However, when changing the background stimulus to the neutral background face, this effect of face ethnic profile disappeared. Overall, these results suggest that individuals have different expectations of how pain is expressed by Black and White individuals, and that cultural factors may explain a part of this phenomenon. © 2023 The Authors. British Journal of Psychology published by John Wiley & Sons Ltd on behalf of The British Psychological Society.},
  publisher = {John Wiley and Sons Ltd},
  keywords  = {Caucasian, emotion, Emotions, Ethnicity, human, Humans, Pain, psychology, White People},
  pubstate  = {published},
  tppubtype = {article}
}
Bérubé, A.; Turgeon, J.; Blais, C.; Fiset, D.
Emotion Recognition in Adults With a History of Childhood Maltreatment: A Systematic Review Journal Article
In: Trauma, Violence, and Abuse, vol. 24, no. 1, pp. 278–294, 2023, ISSN: 15248380 (ISSN), (Publisher: SAGE Publications Ltd).
Abstract | Links | BibTeX | Tags: adult, anger, Child, Child Abuse, childhood maltreatment, cycle of maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, Fear, human, Humans, perception, physiology, psychology, systematic review
@article{berube_emotion_2023,
  title     = {Emotion Recognition in Adults With a History of Childhood Maltreatment: A Systematic Review},
  author    = {Bérubé, A. and Turgeon, J. and Blais, C. and Fiset, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85109658115&doi=10.1177%2f15248380211029403&partnerID=40&md5=5654c858d5c0c84bfdd832a4c04dd1d5},
  doi       = {10.1177/15248380211029403},
  issn      = {1524-8380},
  year      = {2023},
  date      = {2023-01-01},
  journal   = {Trauma, Violence, and Abuse},
  volume    = {24},
  number    = {1},
  pages     = {278--294},
  abstract  = {Child maltreatment has many well-documented lasting effects on children. Among its consequences, it affects children’s recognition of emotions. More and more studies are recognizing the lasting effect that a history of maltreatment can have on emotion recognition. A systematic literature review was conducted to better understand this relationship. The Preferred Reporting Items for Systematic Reviews and Meta-Analyses (PRISMA) protocol was used and four databases were searched, MEDLINE/PubMed, PsycINFO, EMBASE, and FRANCIS, using three cross-referenced key words: child abuse, emotion recognition, and adults. The search process identified 23 studies that met the inclusion criteria. The review highlights the wide variety of measures used to assess child maltreatment as well as the different protocols used to measure emotion recognition. The results indicate that adults with a history of childhood maltreatment show a differentiated reaction to happiness, anger, and fear. Happiness is less detected, whereas negative emotions are recognized more rapidly and at a lower intensity compared to adults not exposed to such traumatic events. Emotion recognition is also related to greater brain activation for the maltreated group. However, the results are less consistent for adults who also have a diagnosis of mental health problems. The systematic review found that maltreatment affects the perception of emotions expressed on both adult and child faces. However, more research is needed to better understand how a history of maltreatment is related to adults’ perception of children’s emotions. © The Author(s) 2021.},
  publisher = {SAGE Publications Ltd},
  keywords  = {adult, anger, Child, Child Abuse, childhood maltreatment, cycle of maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, Fear, human, Humans, perception, physiology, psychology, systematic review},
  pubstate  = {published},
  tppubtype = {article}
}
Charbonneau, I.; Guérette, J.; Cormier, S.; Blais, C.; Lalonde-Beaudoin, G.; Smith, F. W.; Fiset, D.
The role of spatial frequencies for facial pain categorization Journal Article
In: Scientific Reports, vol. 11, no. 1, 2021, ISSN: 20452322, (Publisher: Nature Research).
Abstract | Links | BibTeX | Tags: Adolescent, adult, Classification, Distance Perception, emotion, Emotions, Face, face pain, Facial Expression, Facial Pain, Facial Recognition, female, human, Humans, Knowledge, male, Normal Distribution, Pattern Recognition, procedures, psychology, Psychophysics, recognition, reproducibility, Reproducibility of Results, Visual, Young Adult
@article{charbonneau_role_2021,
  title     = {The role of spatial frequencies for facial pain categorization},
  author    = {Charbonneau, I. and Guérette, J. and Cormier, S. and Blais, C. and Lalonde-Beaudoin, G. and Smith, F. W. and Fiset, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85111138273&doi=10.1038%2fs41598-021-93776-7&partnerID=40&md5=d759d0218de65fce371bb51d7f2593d8},
  doi       = {10.1038/s41598-021-93776-7},
  issn      = {2045-2322},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {Scientific Reports},
  volume    = {11},
  number    = {1},
  abstract  = {Studies on low-level visual information underlying pain categorization have led to inconsistent findings. Some show an advantage for low spatial frequency information (SFs) and others a preponderance of mid SFs. This study aims to clarify this gap in knowledge since these results have different theoretical and practical implications, such as how far away an observer can be in order to categorize pain. This study addresses this question by using two complementary methods: a data-driven method without a priori expectations about the most useful SFs for pain recognition and a more ecological method that simulates the distance of stimuli presentation. We reveal a broad range of important SFs for pain recognition starting from low to relatively high SFs and showed that performance is optimal in a short to medium distance (1.2–4.8 m) but declines significantly when mid SFs are no longer available. This study reconciles previous results that show an advantage of LSFs over HSFs when using arbitrary cutoffs, but above all reveal the prominent role of mid-SFs for pain recognition across two complementary experimental tasks. © 2021, The Author(s).},
  publisher = {Nature Research},
  keywords  = {Adolescent, adult, Classification, Distance Perception, emotion, Emotions, Face, face pain, Facial Expression, Facial Pain, Facial Recognition, female, human, Humans, Knowledge, male, Normal Distribution, Pattern Recognition, procedures, psychology, Psychophysics, recognition, reproducibility, Reproducibility of Results, Visual, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Turgeon, J.; Bérubé, A.; Blais, C.; Lemieux, A.; Fournier, A.
Recognition of children's emotional facial expressions among mothers reporting a history of childhood maltreatment Journal Article
In: PLoS ONE, vol. 15, no. 12 December, 2020, ISSN: 19326203, (Publisher: Public Library of Science).
Abstract | Links | BibTeX | Tags: adult, Adverse Childhood Experiences, anger, article, Child, Child Abuse, Childhood Trauma Questionnaire, disgust, emotion, emotional neglect, Emotions, Facial Expression, Facial Recognition, Fear, female, happiness, human, Humans, major clinical study, male, mother, Mothers, parenthood, path analysis, physical abuse, Preschool, preschool child, psychology, recognition, Retrospective Studies, retrospective study, sadness, self report, sexual abuse, structural equation modeling, Young Adult
@article{turgeon_recognition_2020,
  title     = {Recognition of children's emotional facial expressions among mothers reporting a history of childhood maltreatment},
  author    = {Turgeon, J. and Bérubé, A. and Blais, C. and Lemieux, A. and Fournier, A.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85098916379&doi=10.1371%2fjournal.pone.0243083&partnerID=40&md5=2ef477465c0ad75d67b7f13d05f783b2},
  doi       = {10.1371/journal.pone.0243083},
  issn      = {1932-6203},
  year      = {2020},
  date      = {2020-01-01},
  journal   = {PLoS ONE},
  volume    = {15},
  number    = {12},
  abstract  = {Several studies have shown that child maltreatment is associated with both positive and negative effects on the recognition of facial emotions. Research has provided little evidence of a relation between maltreatment during childhood and young adults' ability to recognize facial displays of emotion in children, an essential skill for a sensitive parental response. In this study, we examined the consequences of different forms of maltreatment experienced in childhood on emotion recognition during parenthood. Participants included sixty-three mothers of children aged 2 to 5 years. Retrospective self-reports of childhood maltreatment were assessed using the short form of the Childhood Trauma Questionnaire (CTQ). Emotion recognition was measured using a morphed facial emotion identification task of all six basic emotions (anger, disgust, fear, happiness, sadness, and surprise). A Path Analysis via Structural Equation Model revealed that a history of physical abuse is related to a decreased ability to recognize both fear and sadness in children, whereas emotional abuse and sexual abuse are related to a decreased ability to recognize anger in children. In addition, emotional neglect is associated with an increased ability to recognize anger, whereas physical neglect is associated with less accuracy in recognizing happiness in children's facial emotional expressions. These findings have important clinical implications and expand current understanding of the consequences of childhood maltreatment on parents' ability to detect children's needs. © 2020 Turgeon et al.},
  publisher = {Public Library of Science},
  keywords  = {adult, Adverse Childhood Experiences, anger, article, Child, Child Abuse, Childhood Trauma Questionnaire, disgust, emotion, emotional neglect, Emotions, Facial Expression, Facial Recognition, Fear, female, happiness, human, Humans, major clinical study, male, mother, Mothers, parenthood, path analysis, physical abuse, Preschool, preschool child, psychology, recognition, Retrospective Studies, retrospective study, sadness, self report, sexual abuse, structural equation modeling, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Bérubé, A.; Blais, C.; Fournier, A.; Turgeon, J.; Forget, H.; Coutu, S.; Dubeau, D.
Childhood maltreatment moderates the relationship between emotion recognition and maternal sensitive behaviors Journal Article
In: Child Abuse and Neglect, vol. 102, 2020, ISSN: 01452134 (ISSN), (Publisher: Elsevier Ltd).
Abstract | Links | BibTeX | Tags: article, Child, Child Abuse, childhood maltreatment, Childhood Trauma Questionnaire, emotion, Emotion Recognition, Emotions, female, human, human experiment, Humans, male, Maternal Behavior, mother child relation, Mother-Child Relations, photography, physiology, Preschool, preschool child, psychology, Sensitive behaviors
@article{berube_childhood_2020,
  title     = {Childhood maltreatment moderates the relationship between emotion recognition and maternal sensitive behaviors},
  author    = {Bérubé, A. and Blais, C. and Fournier, A. and Turgeon, J. and Forget, H. and Coutu, S. and Dubeau, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85079890346&doi=10.1016%2fj.chiabu.2020.104432&partnerID=40&md5=05add864de22734e614fe7a34d6d6f1a},
  doi       = {10.1016/j.chiabu.2020.104432},
  issn      = {0145-2134},
  year      = {2020},
  date      = {2020-01-01},
  journal   = {Child Abuse and Neglect},
  volume    = {102},
  abstract  = {Background: Sensitivity is defined as parents ability to perceive, react and respond to children signals. Having a history of childhood maltreatment changes the way adults perceive visual emotions. These perceptual characteristics could have important consequences on how these parents respond to their children. Objective: The current study examines how a history of childhood maltreatment moderates the relationship between maternal emotion recognition in child faces and sensitive behaviors toward their child during free-play and a structured task. Participants and Setting: Participants included 58 mothers and their children aged between 2 and 5 years. Methods: Mothers were exposed to a set of photographs of child faces showing morphed images of the six basic emotional expressions. Mother-child interactions were then coded for sensitive behaviors. Mothers’ history of childhood maltreatment was assessed using the Childhood Trauma Questionnaire. Results: Maltreatment severity was related to poorer abilities in emotion recognition. However, the association between emotion recognition and sensitive behavior was moderate by history of childhood maltreatment. For mothers exposed to a severe form of childhood maltreatment, a better emotion recognition was related to less sensitive behaviors toward the child, both during free-play and the structured task. Conclusion: This relationship is unique to these mothers and is inconsistent with Ainsworth's definition of sensitivity. These results have important implications as they suggest mothers with a history of severe maltreatment would need tailored interventions which take into account their particular reactions to children's emotions. © 2020},
  publisher = {Elsevier Ltd},
  keywords  = {article, Child, Child Abuse, childhood maltreatment, Childhood Trauma Questionnaire, emotion, Emotion Recognition, Emotions, female, human, human experiment, Humans, male, Maternal Behavior, mother child relation, Mother-Child Relations, photography, physiology, Preschool, preschool child, psychology, Sensitive behaviors},
  pubstate  = {published},
  tppubtype = {article}
}
Quintana, P.; Nolet, K.; Baus, O.; Bouchard, S.
The effect of exposure to fear-related body odorants on anxiety and interpersonal trust toward a virtual character Journal Article
In: Chemical Senses, vol. 44, no. 9, pp. 683–692, 2019, ISSN: 0379864X, (Publisher: Oxford University Press).
Abstract | Links | BibTeX | Tags: adult, Anxiety, article, body odor, body odorant, chemistry, controlled study, emotion, Emotions, exposure, Fear, female, fragrance, happiness, human, human experiment, Humans, male, metabolism, Middle Aged, odor, Odorants, pathology, Pleasure, priority journal, Sweat, sweating, Trust, unclassified drug, virtual reality, Young Adult
@article{quintana_effect_2019,
  title     = {The effect of exposure to fear-related body odorants on anxiety and interpersonal trust toward a virtual character},
  author    = {Quintana, P. and Nolet, K. and Baus, O. and Bouchard, S.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85074305238&doi=10.1093%2fchemse%2fbjz063&partnerID=40&md5=50ec319370e1688498abfa845c7ec343},
  doi       = {10.1093/chemse/bjz063},
  issn      = {0379-864X},
  year      = {2019},
  date      = {2019-01-01},
  journal   = {Chemical Senses},
  volume    = {44},
  number    = {9},
  pages     = {683--692},
  abstract  = {A growing body of literature documents how exposure to another person's fear-related body odorants can increase one's own anxiety and interfere with processing of social information, such as facial expression and impression formation. Building on these results, we aimed to 1) test the hypothesis that exposure to fear-related odorant would affect impression formation through fear contagion and 2) verify whether these effects can be observed in an ecologically valid (i.e., virtual) environment. We proposed that exposure to fear-related odorant would cause receivers to feel more anxious, which in turn would lead them to report less trust toward an unknown virtual character. This study had 2 distinct phases. First, we collected perspiration odorants from the armpits of 12 male senders (i.e., the source of the odorant) during the viewing of either fear or joy inducing film clips. In the second phase, 53 women receivers were exposed to either a fear, joy, or neutral odorant (i.e., between-subjects design) by breathing through a gauze attached to a disposable respirator mask while immersed in a virtual bar. As expected, receivers exposed to fear odorants felt significantly more stressed. Mediation analysis also revealed an indirect effect of exposure on trust through anxiety. More specifically, the more anxious the receiver felt, the less she trusted the virtual character. Our results show for the first time that the impact of exposure to fear-related body odorants on negative interpersonal impression formation is mediated by the anxiety induced in the receiver. © 2019 The Author(s) 2019. Published by Oxford University Press. All rights reserved.},
  publisher = {Oxford University Press},
  keywords  = {adult, Anxiety, article, body odor, body odorant, chemistry, controlled study, emotion, Emotions, exposure, Fear, female, fragrance, happiness, human, human experiment, Humans, male, metabolism, Middle Aged, odor, Odorants, pathology, Pleasure, priority journal, Sweat, sweating, Trust, unclassified drug, virtual reality, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}