Lévesque-Lacasse, A.; Desjardins, M. -C.; Fiset, D.; Charbonneau, C.; Cormier, S.; Blais, C.
The Relationship Between the Ability to Infer Another’s Pain and the Expectations Regarding the Appearance of Pain Facial Expressions: Investigation of the Role of Visual Perception Journal article
In: Journal of Pain, vol. 25, no. 1, pp. 250–264, 2024, ISSN: 1526-5900, (Publisher: Elsevier B.V.).
Abstract | Links | BibTeX | Tags: anger, article, chronic pain, disgust, emotion, Emotions, Empathy, Estimation bias, expectation, eyebrow, Facial Expression, Facial expressions of pain, human, Humans, mental representation, Mental representations, motivation, Pain, pain assessment, psychology, questionnaire, reliability, reproducibility, Reproducibility of Results, Reverse correlation, sadness, sensitivity, vision, Visual Perception
@article{levesque-lacasse_relationship_2024,
title = {The Relationship Between the Ability to Infer Another's Pain and the Expectations Regarding the Appearance of Pain Facial Expressions: Investigation of the Role of Visual Perception},
author = {A. Lévesque-Lacasse and M. -C. Desjardins and D. Fiset and C. Charbonneau and S. Cormier and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85171357836&doi=10.1016%2fj.jpain.2023.08.007&partnerID=40&md5=dcfeb2e0eb9f13b42122ddfcbc987fc0},
doi = {10.1016/j.jpain.2023.08.007},
issn = {15265900},
year = {2024},
date = {2024-01-01},
journal = {Journal of Pain},
volume = {25},
number = {1},
pages = {250–264},
abstract = {Although pain is a commonly experienced and observed affective state, it is frequently misinterpreted, which leads to inadequate caregiving. Studies show the ability to estimate pain in others (estimation bias) and to detect its subtle variations (sensitivity) may emerge from independent mechanisms. While estimation bias is modulated by variables such as empathy level, pain catastrophizing tendency, and overexposure to pain, sensitivity remains unaffected. The present study verifies whether these two types of inaccuracies are partly explained by perceptual factors. Using reverse correlation, we measured their association with participants’ mental representation of pain, or more simply put, with their expectations of what the face of a person in pain should look like. Experiment 1 shows that both parameters are associated with variations in expectations of this expression. More specifically, estimation bias is linked with expectations characterized by salient changes in the middle face region, whereas sensitivity is associated with salient changes in the eyebrow region. Experiment 2 reveals that bias and sensitivity yield differences in emotional representations. Expectations of individuals with a lower underestimation tendency are qualitatively rated as expressing more pain and sadness, and those of individuals with a higher level of sensitivity as expressing more pain, anger, and disgust. Together, these results provide evidence for a perceptual contribution to pain inferencing that is independent of other psychosocial variables and for its link to observers’ expectations. Perspective: This article reinforces the contribution of perceptual mechanisms in pain assessment. Moreover, strategies aimed at improving the reliability of individuals’ expectations regarding the appearance of facial expressions of pain could potentially be developed and contribute to decreasing the inaccuracies found in pain assessment and the confusion between pain and other affective states. © 2023 United States Association for the Study of Pain, Inc.},
note = {Publisher: Elsevier B.V.},
keywords = {anger, article, chronic pain, disgust, emotion, Emotions, Empathy, Estimation bias, expectation, eyebrow, Facial Expression, Facial expressions of pain, human, Humans, mental representation, Mental representations, motivation, Pain, pain assessment, psychology, questionnaire, reliability, reproducibility, Reproducibility of Results, Reverse correlation, sadness, sensitivity, vision, Visual Perception},
pubstate = {published},
tppubtype = {article}
}
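The reverse correlation logic this abstract relies on is compact enough to sketch: on each trial random noise is added to a base face, the observer judges the stimulus, and averaging the noise by response yields a classification image of the observer's expectations. A minimal illustration, assuming Gaussian noise fields and binary pain/no-pain responses (the study's actual stimulus pipeline and parameters are not reproduced here):

```python
import numpy as np

def classification_image(noise_fields, responses):
    """Classification image: mean noise on 'pain' trials minus mean noise
    on 'not pain' trials, z-scored for comparability across observers.

    noise_fields: (n_trials, h, w) noise added to the base face per trial.
    responses:    (n_trials,) bool, True when the stimulus was judged as pain.
    """
    noise_fields = np.asarray(noise_fields, dtype=float)
    responses = np.asarray(responses, dtype=bool)
    ci = noise_fields[responses].mean(axis=0) - noise_fields[~responses].mean(axis=0)
    return (ci - ci.mean()) / ci.std()

# Illustrative run on simulated data
rng = np.random.default_rng(0)
noise = rng.standard_normal((500, 128, 128))
judged_pain = rng.random(500) < 0.5
ci = classification_image(noise, judged_pain)
```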
Pétrin, R.; Bérubé, A.; St-Pierre, É.; Blais, C.
Maternal childhood emotional abuse increases cardiovascular responses to children’s emotional facial expressions Journal article
In: PLoS ONE, vol. 19, no. 5 May, 2024, ISSN: 1932-6203, (Publisher: Public Library of Science).
Abstract | Links | BibTeX | Tags: adult, alcohol consumption, analysis of variance, article, blood pressure, cardiovascular response, Child, Child Abuse, Childhood, Childhood Trauma Questionnaire, demographics, electrocardiogram, Electrocardiography, emotion, Emotional Abuse, Emotions, Ethnicity, Facial Expression, female, Heart Rate, heart rate variability, human, human experiment, Humans, Likert scale, male, mother, mother child relation, Mother-Child Relations, Mothers, parasympathetic tone, physical activity, physiology, post hoc analysis, psychology, questionnaire, sexual abuse, Surveys and Questionnaires
@article{petrin_maternal_2024,
title = {Maternal childhood emotional abuse increases cardiovascular responses to children’s emotional facial expressions},
author = {R. Pétrin and A. Bérubé and É. St-Pierre and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85192637581&doi=10.1371%2fjournal.pone.0302782&partnerID=40&md5=c464b30fe7cc5b7b0baaf865fdf1f6de},
doi = {10.1371/journal.pone.0302782},
issn = {19326203},
year = {2024},
date = {2024-01-01},
journal = {PLoS ONE},
volume = {19},
number = {5 May},
abstract = {Parents with a history of childhood maltreatment may be more likely to respond inadequately to their child’s emotional cues, such as crying or screaming, due to previous exposure to prolonged stress. While studies have investigated parents’ physiological reactions to their children’s vocal expressions of emotions, less attention has been given to their responses when perceiving children’s facial expressions of emotions. The present study aimed to determine if viewing facial expressions of emotions in children induces cardiovascular changes in mothers (hypo- or hyper-arousal) and whether these differ as a function of childhood maltreatment. A total of 104 mothers took part in this study. Their experiences of childhood maltreatment were measured using the Childhood Trauma Questionnaire (CTQ). Participants’ electrocardiogram signals were recorded during a task in which they viewed a landscape video (baseline) and images of children’s faces expressing different intensities of emotion. Heart rate variability (HRV) was extracted from the recordings as an indicator of parasympathetic reactivity. Participants presented two profiles: one group of mothers had a decreased HRV when presented with images of children’s facial expressions of emotions, while the other group’s HRV increased. However, HRV change was not significantly different between the two groups. The interaction between HRV groups and the severity of maltreatment experienced was marginal. Results suggested that experiences of childhood emotional abuse were more common in mothers whose HRV increased during the task. Therefore, more severe childhood experiences of emotional abuse could be associated with mothers’ cardiovascular hyperreactivity. Maladaptive cardiovascular responses could have a ripple effect, influencing how mothers react to their children’s facial expressions of emotions. That reaction could affect the quality of their interaction with their child. Providing interventions that help parents regulate their physiological and behavioral responses to stress might be helpful, especially if they have experienced childhood maltreatment. © 2024 Public Library of Science. All rights reserved.},
note = {Publisher: Public Library of Science},
keywords = {adult, alcohol consumption, analysis of variance, article, blood pressure, cardiovascular response, Child, Child Abuse, Childhood, Childhood Trauma Questionnaire, demographics, electrocardiogram, Electrocardiography, emotion, Emotional Abuse, Emotions, Ethnicity, Facial Expression, female, Heart Rate, heart rate variability, human, human experiment, Humans, Likert scale, male, mother, mother child relation, Mother-Child Relations, Mothers, parasympathetic tone, physical activity, physiology, post hoc analysis, psychology, questionnaire, sexual abuse, Surveys and Questionnaires},
pubstate = {published},
tppubtype = {article}
}
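The HRV reactivity measure at the core of this study lends itself to a short sketch. The abstract does not name the index extracted from the ECG, so RMSSD, a standard time-domain proxy for parasympathetic activity, is assumed here purely for illustration:

```python
import numpy as np

def rmssd(r_peak_times_s):
    """RMSSD (ms) from R-peak times in seconds: a common time-domain HRV
    index taken as a proxy for parasympathetic (vagal) activity."""
    rr_ms = np.diff(np.asarray(r_peak_times_s, dtype=float)) * 1000.0
    return float(np.sqrt(np.mean(np.diff(rr_ms) ** 2)))

# HRV reactivity in the spirit of the task design: task HRV minus baseline HRV.
baseline_hrv = rmssd([0.00, 0.81, 1.60, 2.42, 3.21, 4.05])
task_hrv = rmssd([0.00, 0.78, 1.60, 2.36, 3.18, 3.92])
delta_hrv = task_hrv - baseline_hrv  # < 0: decrease; > 0: increase
```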
Bérubé, A.; Pétrin, R.; Blais, C.
Parental depression moderates the relationship between childhood maltreatment and the recognition of children expressions of emotions Journal article
In: Frontiers in Psychiatry, vol. 15, 2024, ISSN: 1664-0640, (Publisher: Frontiers Media SA).
Abstract | Links | BibTeX | Tags: adult, anger, article, Beck Depression Inventory, Child, Child Abuse, child parent relation, childhood maltreatment, Childhood Trauma Questionnaire, Depression, disease severity, disgust, educational status, emotion, Emotion Recognition, Facial Expression, female, happiness, human, income, major clinical study, male, parent-child relationship, parental sensitivity, preschool child, questionnaire, recognition, sadness
@article{berube_parental_2024,
title = {Parental depression moderates the relationship between childhood maltreatment and the recognition of children expressions of emotions},
author = {A. Bérubé and R. Pétrin and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85196266525&doi=10.3389%2ffpsyt.2024.1374872&partnerID=40&md5=ce03a1c39e709fc0f2c773d4f82f3a10},
doi = {10.3389/fpsyt.2024.1374872},
issn = {16640640},
year = {2024},
date = {2024-01-01},
journal = {Frontiers in Psychiatry},
volume = {15},
abstract = {Background: Sensitivity plays a crucial role in parenting as it involves the ability to perceive and respond appropriately to children’s signals. Childhood maltreatment and depression can negatively impact adults’ ability to recognize emotions, but it is unclear which of these factors has a greater impact or how they interact. This knowledge is central to developing efficient, targeted interventions. This paper examines the interaction between parents’ depressive symptoms and childhood maltreatment and its influence on their ability to recognize the five basic emotions (happiness, anger, sadness, fear, and disgust) in children’s faces. Method: The sample consisted of 52 parents. Depressive symptoms were measured by the depression subscale of the Brief Symptom Inventory-18 (BSI-18), and maltreatment history was assessed by the Childhood Trauma Questionnaire (CTQ). Children’s emotional stimuli were morphed images created using The Child Affective Facial Expression (CAFE) database. Results: Our findings indicate that depressive symptoms moderate the relationship between parents’ history of childhood maltreatment and emotion recognition skills. Parents with higher depressive symptoms had lower emotion recognition accuracy when they had not experienced maltreatment. When childhood maltreatment was severe, emotion recognition skills were more consistent across all levels of depression. The relationship between depression and emotion recognition was primarily linked to recognizing sadness in children’s faces. Conclusion: These findings highlight how different experiences can affect parental abilities in emotion recognition and emphasize the need for interventions tailored to individual profiles to improve their effectiveness. Copyright © 2024 Bérubé, Pétrin and Blais.},
note = {Publisher: Frontiers Media SA},
keywords = {adult, anger, article, Beck Depression Inventory, Child, Child Abuse, child parent relation, childhood maltreatment, Childhood Trauma Questionnaire, Depression, disease severity, disgust, educational status, emotion, Emotion Recognition, Facial Expression, female, happiness, human, income, major clinical study, male, parent-child relationship, parental sensitivity, preschool child, questionnaire, recognition, sadness},
pubstate = {published},
tppubtype = {article}
}
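Moderation of the kind reported here is conventionally tested as an interaction term in a regression model. A minimal sketch on simulated stand-in data; the variable names (ctq, bsi, accuracy) and coefficients are invented for illustration, not taken from the study:

```python
import numpy as np
import pandas as pd
import statsmodels.formula.api as smf

rng = np.random.default_rng(1)
n = 52
df = pd.DataFrame({
    "ctq": rng.normal(40, 10, n),  # childhood maltreatment severity (CTQ)
    "bsi": rng.normal(5, 2, n),    # depressive symptoms (BSI-18 subscale)
})
df["accuracy"] = (0.80 - 0.010 * df["bsi"]
                  + 0.0004 * df["ctq"] * df["bsi"]
                  + rng.normal(0, 0.05, n))

# Moderation is carried by the ctq:bsi interaction term.
model = smf.ols("accuracy ~ ctq * bsi", data=df).fit()
print(model.summary().tables[1])
```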
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.
Predicting the Arousal and Valence Values of Emotional States Using Learned, Predesigned, and Deep Visual Features † Journal article
In: Sensors, vol. 24, no. 13, 2024, ISSN: 1424-8220, (Publisher: Multidisciplinary Digital Publishing Institute (MDPI)).
Abstract | Links | BibTeX | Tags: adult, Affective interaction, Arousal, artificial neural network, Cognitive state, Cognitive/emotional state, Collaborative interaction, computer, Convolutional neural networks, correlation coefficient, Deep learning, emotion, Emotional state, Emotions, female, Forecasting, Helmet mounted displays, human, Humans, Learning algorithms, Learning systems, Long short-term memory, Machine learning, Machine-learning, male, Mean square error, Neural networks, physiology, Regression, Root mean squared errors, Video recording, virtual reality, Visual feature, visual features
@article{joudeh_predicting_2024,
title = {Predicting the Arousal and Valence Values of Emotional States Using Learned, Predesigned, and Deep Visual Features †},
author = {I. O. Joudeh and A. -M. Cretu and S. Bouchard},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85198382238&doi=10.3390%2fs24134398&partnerID=40&md5=cefa8b2e2c044d02f99662af350007db},
doi = {10.3390/s24134398},
issn = {14248220},
year = {2024},
date = {2024-01-01},
journal = {Sensors},
volume = {24},
number = {13},
abstract = {The cognitive state of a person can be categorized using the circumplex model of emotional states, a continuous model of two dimensions: arousal and valence. The purpose of this research is to select a machine learning model(s) to be integrated into a virtual reality (VR) system that runs cognitive remediation exercises for people with mental health disorders. As such, the prediction of emotional states is essential to customize treatments for those individuals. We exploit the Remote Collaborative and Affective Interactions (RECOLA) database to predict arousal and valence values using machine learning techniques. RECOLA includes audio, video, and physiological recordings of interactions between human participants. To allow learners to focus on the most relevant data, features are extracted from raw data. Such features can be predesigned, learned, or extracted implicitly using deep learners. Our previous work on video recordings focused on predesigned and learned visual features. In this paper, we extend our work onto deep visual features. Our deep visual features are extracted using the MobileNet-v2 convolutional neural network (CNN) that we previously trained on RECOLA’s video frames of full/half faces. As the final purpose of our work is to integrate our solution into a practical VR application using head-mounted displays, we experimented with half faces as a proof of concept. The extracted deep features were then used to predict arousal and valence values via optimizable ensemble regression. We also fused the extracted visual features with the predesigned visual features and predicted arousal and valence values using the combined feature set. In an attempt to enhance our prediction performance, we further fused the predictions of the optimizable ensemble model with the predictions of the MobileNet-v2 model. After decision fusion, we achieved a root mean squared error (RMSE) of 0.1140, a Pearson’s correlation coefficient (PCC) of 0.8000, and a concordance correlation coefficient (CCC) of 0.7868 on arousal predictions. We achieved an RMSE of 0.0790, a PCC of 0.7904, and a CCC of 0.7645 on valence predictions. © 2024 by the authors.},
note = {Publisher: Multidisciplinary Digital Publishing Institute (MDPI)},
keywords = {adult, Affective interaction, Arousal, artificial neural network, Cognitive state, Cognitive/emotional state, Collaborative interaction, computer, Convolutional neural networks, correlation coefficient, Deep learning, emotion, Emotional state, Emotions, female, Forecasting, Helmet mounted displays, human, Humans, Learning algorithms, Learning systems, Long short-term memory, Machine learning, Machine-learning, male, Mean square error, Neural networks, physiology, Regression, Root mean squared errors, Video recording, virtual reality, Visual feature, visual features},
pubstate = {published},
tppubtype = {article}
}
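The deep-feature step the abstract describes, a MobileNet-v2 backbone used as a fixed extractor feeding an ensemble regressor, can be sketched as follows. ImageNet weights and 224×224 inputs are assumptions for illustration; the authors fine-tuned their network on RECOLA video frames:

```python
import torch
from torchvision.models import mobilenet_v2, MobileNet_V2_Weights

# MobileNet-v2 convolutional trunk as a frozen deep-feature extractor.
backbone = mobilenet_v2(weights=MobileNet_V2_Weights.DEFAULT).features.eval()

@torch.no_grad()
def deep_features(frames):        # frames: (N, 3, 224, 224) float tensor
    fmap = backbone(frames)       # -> (N, 1280, 7, 7)
    return fmap.mean(dim=(2, 3))  # global average pool -> (N, 1280)
```

The pooled 1280-dimensional vectors would then feed the ensemble regressor, and in the fused variant be concatenated with the predesigned visual features.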
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.; Guimond, S.
Prediction of Continuous Emotional Measures through Physiological and Visual Data † Journal article
In: Sensors, vol. 23, no. 12, 2023, ISSN: 1424-8220, (Publisher: MDPI).
Abstract | Links | BibTeX | Tags: Affect recognition, Affective state, Arousal, Data-source, Deep learning, Electrocardiography, emotion, Emotion Recognition, Emotions, face recognition, Faces detection, Forecasting, human, Humans, Images processing, Learning systems, Machine learning, Machine-learning, mental disease, Mental Disorders, Physiological data, physiology, Signal-processing, Statistical tests, Video recording, Virtual-reality environment
@article{joudeh_prediction_2023,
title = {Prediction of Continuous Emotional Measures through Physiological and Visual Data †},
author = {I. O. Joudeh and A. -M. Cretu and S. Bouchard and S. Guimond},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85163943735&doi=10.3390%2fs23125613&partnerID=40&md5=5e970f0d8c5790b85d8d77a9f3f52a2d},
doi = {10.3390/s23125613},
issn = {14248220},
year = {2023},
date = {2023-01-01},
journal = {Sensors},
volume = {23},
number = {12},
abstract = {The affective state of a person can be measured using arousal and valence values. In this article, we contribute to the prediction of arousal and valence values from various data sources. Our goal is to later use such predictive models to adaptively adjust virtual reality (VR) environments and help facilitate cognitive remediation exercises for users with mental health disorders, such as schizophrenia, while avoiding discouragement. Building on our previous work on physiological, electrodermal activity (EDA) and electrocardiogram (ECG) recordings, we propose improving preprocessing and adding novel feature selection and decision fusion processes. We use video recordings as an additional data source for predicting affective states. We implement an innovative solution based on a combination of machine learning models alongside a series of preprocessing steps. We test our approach on RECOLA, a publicly available dataset. The best results are obtained with a concordance correlation coefficient (CCC) of 0.996 for arousal and 0.998 for valence using physiological data. Related work in the literature reported lower CCCs on the same data modality; thus, our approach outperforms the state-of-the-art approaches for RECOLA. Our study underscores the potential of using advanced machine learning techniques with diverse data sources to enhance the personalization of VR environments. © 2023 by the authors.},
note = {Publisher: MDPI},
keywords = {Affect recognition, Affective state, Arousal, Data-source, Deep learning, Electrocardiography, emotion, Emotion Recognition, Emotions, face recognition, Faces detection, Forecasting, human, Humans, Images processing, Learning systems, Machine learning, Machine-learning, mental disease, Mental Disorders, Physiological data, physiology, Signal-processing, Statistical tests, Video recording, Virtual-reality environment},
pubstate = {published},
tppubtype = {article}
}
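Two ingredients of this pipeline, the concordance correlation coefficient used for evaluation and decision-level fusion of model predictions, are compact enough to sketch. The convex-weight grid search below is an assumption for illustration; the paper does not publish its exact fusion rule:

```python
import numpy as np

def ccc(y, p):
    """Concordance correlation coefficient (the agreement metric reported)."""
    y, p = np.asarray(y, float), np.asarray(p, float)
    cov = np.mean((y - y.mean()) * (p - p.mean()))
    return 2 * cov / (y.var() + p.var() + (y.mean() - p.mean()) ** 2)

def fuse(pred_a, pred_b, y_val):
    """Decision-level fusion: grid-search a convex weight combining two
    models' predictions against a validation target."""
    weights = np.linspace(0.0, 1.0, 101)
    scores = [ccc(y_val, w * np.asarray(pred_a) + (1 - w) * np.asarray(pred_b))
              for w in weights]
    best = int(np.argmax(scores))
    return weights[best], scores[best]
```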
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.; Guimond, S.
Prediction of Emotional States from Partial Facial Features for Virtual Reality Applications Journal article
In: Annual Review of CyberTherapy and Telemedicine, vol. 21, pp. 17–21, 2023, ISSN: 1554-8716, (Publisher: Interactive Media Institute).
Abstract | Links | BibTeX | Tags: Arousal, article, clinical article, convolutional neural network, correlation coefficient, data base, emotion, facies, female, human, human experiment, Image processing, long short term memory network, male, random forest, residual neural network, root mean squared error, videorecording, virtual reality
@article{joudeh_prediction_2023-1,
title = {Prediction of Emotional States from Partial Facial Features for Virtual Reality Applications},
author = {I. O. Joudeh and A. -M. Cretu and S. Bouchard and S. Guimond},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85182471413&partnerID=40&md5=8190e0dbb5b48ae508515f4029b0a0d1},
issn = {15548716},
year = {2023},
date = {2023-01-01},
journal = {Annual Review of CyberTherapy and Telemedicine},
volume = {21},
pages = {17–21},
abstract = {The availability of virtual reality (VR) in numerous clinical contexts has been made possible by recent technological advancements. One application is using VR for cognitive interventions with individuals who have mental disorders. Predicting the emotional states of users could help to prevent their discouragement during VR interventions. We can monitor the emotional states of individuals using sensors like an external camera, as they engage in various tasks within VR environments. The emotional state of VR users can be measured through arousal and valence, as per the Circumplex model. We used the Remote Collaborative and Affective Interactions (RECOLA) database of emotional behaviours. We processed video frames from 18 RECOLA videos. Due to the headset in VR systems, we detected faces and cropped the images of faces to use the lower half of the face only. We labeled the images with arousal and valence values to reflect various emotions. Convolutional neural networks (CNNs), specifically MobileNet-v2 and ResNet-18, were then used to predict arousal and valence values. MobileNet-v2 outperforms ResNet-18 as well as others from the literature. We achieved a root mean squared error (RMSE), Pearson’s correlation coefficient (PCC), and concordance correlation coefficient (CCC) of 0.1495, 0.6387, and 0.6081 for arousal, and 0.0996, 0.6453, and 0.6232 for valence. Our work acts as a proof-of-concept for predicting emotional states from arousal and valence values via visual data of users immersed in VR experiences. In the future, predicted emotions could be used to automatically adjust the VR environment for individuals engaged in cognitive interventions. © 2023, Interactive Media Institute. All rights reserved.},
note = {Publisher: Interactive Media Institute},
keywords = {Arousal, article, clinical article, convolutional neural network, correlation coefficient, data base, emotion, facies, female, human, human experiment, Image processing, long short term memory network, male, random forest, residual neural network, root mean squared error, videorecording, virtual reality},
pubstate = {published},
tppubtype = {article}
}
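The headset-motivated preprocessing step, cropping detected faces to their lower half, is easy to illustrate. OpenCV's stock Haar cascade is an assumption here; the paper does not specify which face detector was used:

```python
import cv2

# Stock OpenCV Haar cascade as the face detector (illustrative choice).
detector = cv2.CascadeClassifier(
    cv2.data.haarcascades + "haarcascade_frontalface_default.xml")

def lower_half_face(frame_bgr):
    """Return the lower half of the first detected face, mimicking what a
    VR headset leaves visible; None if no face is found."""
    gray = cv2.cvtColor(frame_bgr, cv2.COLOR_BGR2GRAY)
    faces = detector.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)
    if len(faces) == 0:
        return None
    x, y, w, h = faces[0]
    return frame_bgr[y + h // 2 : y + h, x : x + w]
```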
Gingras, F.; Fiset, D.; Plouffe-Demers, M. -P.; Deschênes, A.; Cormier, S.; Forget, H.; Blais, C.
Pain in the eye of the beholder: Variations in pain visual representations as a function of face ethnicity and culture Journal article
In: British Journal of Psychology, vol. 114, no. 3, pp. 621–637, 2023, ISSN: 0007-1269, (Publisher: John Wiley and Sons Ltd).
Abstract | Links | BibTeX | Tags: Caucasian, emotion, Emotions, Ethnicity, human, Humans, Pain, psychology, White People
@article{gingras_pain_2023,
title = {Pain in the eye of the beholder: Variations in pain visual representations as a function of face ethnicity and culture},
author = {F. Gingras and D. Fiset and M. -P. Plouffe-Demers and A. Deschênes and S. Cormier and H. Forget and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85149411004&doi=10.1111%2fbjop.12641&partnerID=40&md5=eb36c9f5071b30edaff22935109abcea},
doi = {10.1111/bjop.12641},
issn = {00071269},
year = {2023},
date = {2023-01-01},
journal = {British Journal of Psychology},
volume = {114},
number = {3},
pages = {621–637},
abstract = {Pain experienced by Black individuals is systematically underestimated, and recent studies have shown that part of this bias is rooted in perceptual factors. We used Reverse Correlation to estimate visual representations of the pain expression in Black and White faces, in participants originating from both Western and African countries. Groups of raters were then asked to evaluate the presence of pain and other emotions in these representations. A second group of White raters then evaluated those same representations placed over a neutral background face (50% White; 50% Black). Image-based analyses show significant effects of culture and face ethnicity, but no interaction between the two factors. Western representations were more likely to be judged as expressing pain than African representations. For both cultural groups, raters also perceived more pain in White face representations than in Black face representations. However, when changing the background stimulus to the neutral background face, this effect of face ethnic profile disappeared. Overall, these results suggest that individuals have different expectations of how pain is expressed by Black and White individuals, and that cultural factors may explain a part of this phenomenon. © 2023 The Authors. British Journal of Psychology published by John Wiley & Sons Ltd on behalf of The British Psychological Society.},
note = {Publisher: John Wiley and Sons Ltd},
keywords = {Caucasian, emotion, Emotions, Ethnicity, human, Humans, Pain, psychology, White People},
pubstate = {published},
tppubtype = {article}
}
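The background-face manipulation described in this abstract amounts to blending the same classification image onto different neutral base faces before raters judge them. The pixel-wise alpha blend and scaling below are illustrative assumptions, not the study's exact stimulus pipeline:

```python
import numpy as np

def superimpose(base_face, ci, alpha=0.5):
    """Blend a z-scored classification image (CI) onto a neutral base face.

    base_face: (h, w) grayscale array in [0, 255]; ci: z-scored CI of the
    same shape. The scaling and alpha here are arbitrary illustration choices.
    """
    ci_scaled = ci / np.abs(ci).max() * 127.0  # map CI into pixel range
    return np.clip(base_face + alpha * ci_scaled, 0.0, 255.0)
```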
Bérubé, A.; Turgeon, J.; Blais, C.; Fiset, D.
Emotion Recognition in Adults With a History of Childhood Maltreatment: A Systematic Review Journal article
In: Trauma, Violence, and Abuse, vol. 24, no. 1, pp. 278–294, 2023, ISSN: 1524-8380, (Publisher: SAGE Publications Ltd).
Abstract | Links | BibTeX | Tags: adult, anger, Child, Child Abuse, childhood maltreatment, cycle of maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, Fear, human, Humans, perception, physiology, psychology, systematic review
@article{berube_emotion_2023,
title = {Emotion Recognition in Adults With a History of Childhood Maltreatment: A Systematic Review},
author = {A. Bérubé and J. Turgeon and C. Blais and D. Fiset},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85109658115&doi=10.1177%2f15248380211029403&partnerID=40&md5=5654c858d5c0c84bfdd832a4c04dd1d5},
doi = {10.1177/15248380211029403},
issn = {15248380},
year = {2023},
date = {2023-01-01},
journal = {Trauma, Violence, and Abuse},
volume = {24},
number = {1},
pages = {278–294},
abstract = {Child maltreatment has many well-documented lasting effects on children. Among its consequences, it affects children’s recognition of emotions. More and more studies are recognizing the lasting effect that a history of maltreatment can have on emotion recognition. A systematic literature review was conducted to better understand this relationship. The Preferred Reporting Items for Systematic Reviews and Meta-Analyses (PRISMA) protocol was used and four databases were searched, MEDLINE/PubMed, PsycINFO, EMBASE, and FRANCIS, using three cross-referenced key words: child abuse, emotion recognition, and adults. The search process identified 23 studies that met the inclusion criteria. The review highlights the wide variety of measures used to assess child maltreatment as well as the different protocols used to measure emotion recognition. The results indicate that adults with a history of childhood maltreatment show a differentiated reaction to happiness, anger, and fear. Happiness is less detected, whereas negative emotions are recognized more rapidly and at a lower intensity compared to adults not exposed to such traumatic events. Emotion recognition is also related to greater brain activation for the maltreated group. However, the results are less consistent for adults who also have a diagnosis of mental health problems. The systematic review found that maltreatment affects the perception of emotions expressed on both adult and child faces. However, more research is needed to better understand how a history of maltreatment is related to adults’ perception of children’s emotions. © The Author(s) 2021.},
note = {Publisher: SAGE Publications Ltd},
keywords = {adult, anger, Child, Child Abuse, childhood maltreatment, cycle of maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, Fear, human, Humans, perception, physiology, psychology, systematic review},
pubstate = {published},
tppubtype = {article}
}
Békés, V.; Belleville, G.; Lebel, J.; Ouellet, M. -C.; Chen, Z.; Morin, C. M.; Bergeron, N.; Campbell, T. S.; Ghosh, S.; Bouchard, S.; Guay, S.; MacMaster, F. P.
Trainee Therapists’ Perceptions of a Blended Intervention to Promote Resilience after a Natural Disaster: A Qualitative Case Study Journal article
In: Journal of Clinical Medicine, vol. 11, no. 15, 2022, ISSN: 2077-0383, (Publisher: MDPI).
Abstract | Links | BibTeX | Tags: adult, Alberta, article, case study, Depression, emotion, Empathy, expectation, female, human, insomnia, natural disaster, perception, personal experience, posttraumatic stress disorder, psychological resilience, psychotherapist, qualitative analysis, survivor, thematic analysis, therapeutic alliance, wildfire, Young Adult
@article{bekes_trainee_2022,
title = {Trainee Therapists’ Perceptions of a Blended Intervention to Promote Resilience after a Natural Disaster: A Qualitative Case Study},
author = {V. Békés and G. Belleville and J. Lebel and M. -C. Ouellet and Z. Chen and C. M. Morin and N. Bergeron and T. S. Campbell and S. Ghosh and S. Bouchard and S. Guay and F. P. MacMaster},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85136961448&doi=10.3390%2fjcm11154361&partnerID=40&md5=9705da30150789ca11a9c7719d221dac},
doi = {10.3390/jcm11154361},
issn = {20770383},
year = {2022},
date = {2022-01-01},
journal = {Journal of Clinical Medicine},
volume = {11},
number = {15},
abstract = {Background: Natural disasters happen with increasing frequency, and telemental health interventions could offer easily accessible help to reduce mental health symptoms experienced by survivors. However, there are very few programs offered to natural disaster survivors, and no research exists on therapists’ experiences with providing blended interventions for natural disaster survivors. Aims: Our qualitative case study aims to describe psychologists’ experiences with an online, therapist-assisted blended intervention for survivors of the Fort McMurray wildfires in Alberta, Canada. Method: The RESILIENT intervention was developed within the framework of a randomized controlled trial to promote resilience after the Fort McMurray wildfires by providing survivors free access to a 12-module, therapist-assisted intervention, aiming to improve post-traumatic stress, insomnia, and depression symptoms. A focus group design was used to collect data from the therapists, and emerging common themes were identified by thematic analysis. Results: Therapists felt they could build strong alliances and communicate emotions and empathy effectively, although the lack of nonverbal cues posed some challenges. The intervention, according to participating therapists, was less suitable for participants in high-stress situations and in cases of discrepancy between client expectations and the intervention content. Moreover, the therapists perceived specific interventions as easy-to-use or as more challenging based on their complexity and on the therapist support needed for executing them. Client engagement in the program emerged as an underlying theme that had a fundamental impact on alliance, communication, and ultimately, treatment efficiency. Therapist training and supervision were perceived as crucial for the success of the program delivery. Conclusions: Our findings provided several implications for the optimization of blended interventions for natural disaster survivors from our therapists’ perspective. © 2022 by the authors.},
note = {Publisher: MDPI},
keywords = {adult, Alberta, article, case study, Depression, emotion, Empathy, expectation, female, human, insomnia, natural disaster, perception, personal experience, posttraumatic stress disorder, psychological resilience, psychotherapist, qualitative analysis, survivor, thematic analysis, therapeutic alliance, wildfire, Young Adult},
pubstate = {published},
tppubtype = {article}
}
Léveillé, E.; Desjardins, M.; Dumel, G.; Blais, C.; Saint-Amour, D.; Scherzer, P.; De Beaumont, L.
Effects of Emotion and Emotional Ambiguity on Early Visual Event-Related Potentials Journal article
In: Emotion, vol. 23, no. 3, pp. 787–804, 2022, ISSN: 1528-3542, (Publisher: American Psychological Association).
Abstract | Links | BibTeX | Tags: adult, article, Brain, clinical article, controlled study, Electroencephalography, electroencephalography (EEG), emotion, emotional facial expression (EFE), event related potential, event-related potential (ERP), Facial Expression, female, human, human experiment, male, mass univariate statistics, photography, univariate analysis
@article{leveille_effects_2022,
title = {Effects of Emotion and Emotional Ambiguity on Early Visual Event-Related Potentials},
author = {E. Léveillé and M. Desjardins and G. Dumel and C. Blais and D. Saint-Amour and P. Scherzer and L. De Beaumont},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85135853427&doi=10.1037%2femo0001119&partnerID=40&md5=7e89d6dee0b96766b8a359a3a9be4d73},
doi = {10.1037/emo0001119},
issn = {15283542},
year = {2022},
date = {2022-01-01},
journal = {Emotion},
volume = {23},
number = {3},
pages = {787–804},
abstract = {The modulation of early sensory event-related potentials such as the P1, N1, and N170 by emotion and emotional ambiguity is still controversial. Some studies have found a modulation of one or all of these components by one or both of these factors, whereas others have failed to show such results. The objective of this study was to investigate the effects of emotion and ambiguity on the behavioral and electrophysiological responses to a morphed emotion recognition task. Thirty-seven healthy participants (19 men) completed an emotion recognition task where photographs of a male face expressing the six basic emotions morphed with another emotion (in a proportion ranging from 26% to 74%) were randomly presented while electroencephalography was recorded. After each face presentation, participants were asked to identify the facial emotion. We found an emotional effect on the P1, N1, and N170, with greater amplitudes for some emotional facial expressions than for others. However, we found no significant emotional ambiguity effect or interaction between emotion and ambiguity for any of these components. These findings suggest that computation of emotional facial expressions (regardless of their ambiguity) occurs from the early stages of brain processing. © 2022 American Psychological Association},
note = {Publisher: American Psychological Association},
keywords = {adult, article, Brain, clinical article, controlled study, Electroencephalography, electroencephalography (EEG), emotion, emotional facial expression (EFE), event related potential, event-related potential (ERP), Facial Expression, female, human, human experiment, male, mass univariate statistics, photography, univariate analysis},
pubstate = {published},
tppubtype = {article}
}
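The ERP measures at issue here (P1, N1, and N170 amplitudes) come from epoching the EEG around stimulus onsets, baseline-correcting, and averaging. A minimal numpy sketch; the sampling rate, windows, and peak definition below are illustrative assumptions, not the study's recording parameters:

```python
import numpy as np

def erp_n170(eeg, onsets, fs=512, tmin=-0.2, tmax=0.6):
    """Epoch one EEG channel around stimulus onsets, baseline-correct,
    average, and return the N170 amplitude (most negative deflection in
    an assumed 130-200 ms window).

    eeg: 1-D continuous signal; onsets: sample indices, each assumed to be
    at least `pre` samples from the edges of the recording.
    """
    pre, post = int(-tmin * fs), int(tmax * fs)
    epochs = np.stack([eeg[o - pre : o + post] for o in onsets])
    epochs = epochs - epochs[:, :pre].mean(axis=1, keepdims=True)  # baseline
    erp = epochs.mean(axis=0)
    t = np.arange(-pre, post) / fs
    win = (t >= 0.13) & (t <= 0.20)
    return erp, t, erp[win].min()
```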