Audette, P. -L.; Côté, L.; Blais, C.; Duncan, J.; Gingras, F.; Fiset, D.
Part-based processing, but not holistic processing, predicts individual differences in face recognition abilities (Journal article)
In: Cognition, vol. 256, 2025, ISSN: 0010-0277 (Publisher: Elsevier B.V.).
@article{audette_part-based_2025,
title = {Part-based processing, but not holistic processing, predicts individual differences in face recognition abilities},
author = {P. -L. Audette and L. Côté and C. Blais and J. Duncan and F. Gingras and D. Fiset},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85213575434&doi=10.1016%2fj.cognition.2024.106057&partnerID=40&md5=135d2ba1bdf18648b57db0d3a93d0628},
doi = {10.1016/j.cognition.2024.106057},
issn = {00100277 (ISSN)},
year = {2025},
date = {2025-01-01},
journal = {Cognition},
volume = {256},
abstract = {This study aimed to assess the roles of part-based and holistic processing for face processing ability (FPA). A psychophysical paradigm was used in which the efficiency at recognizing isolated or combined facial parts was measured (N = 64), and holistic processing was defined as the perceptual integration from multiple parts. FPA and object processing ability were measured using a battery of tasks. A multiple linear regression including three predictors, namely perceptual integration, part-based efficiency, and object processing, explained 40 % of the variance in FPA. Most importantly, our results reveal a strong predictive relationship between part-based efficiency and FPA, a small predictive relationship between object processing ability and FPA, and no predictive relationship between perceptual integration and FPA. This result was obtained despite considerable variance in perceptual integration skills–with some participants exhibiting a highly efficient integration. These results indicate that part-based processing plays a pivotal role in FPA, whereas holistic processing does not. © 2024 The Authors},
note = {Publisher: Elsevier B.V.},
keywords = {Adolescent, adult, article, face perception, face recognition, Facial Recognition, female, human, human experiment, Humans, Individual differences, Individuality, Integration efficiency, male, multiple linear regression analysis, physiology, psychology, Psychophysics, recognition, Young Adult},
pubstate = {published},
tppubtype = {article}
}
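
As an illustrative aside, not taken from the article above: the reported result (three predictors explaining 40 % of the variance in face processing ability) corresponds to an ordinary least-squares fit with R² ≈ 0.40. Below is a minimal NumPy sketch with simulated data; the variable names (perceptual_integration, part_based_efficiency, object_processing, fpa) are hypothetical stand-ins for the actual measures.

import numpy as np

rng = np.random.default_rng(0)
n = 64  # sample size reported in the abstract

# Simulated predictors (hypothetical stand-ins for the real measures)
perceptual_integration = rng.normal(size=n)
part_based_efficiency = rng.normal(size=n)
object_processing = rng.normal(size=n)

# Simulated outcome: face processing ability (FPA)
fpa = 0.6 * part_based_efficiency + 0.2 * object_processing + rng.normal(scale=0.8, size=n)

# Ordinary least squares with an intercept column
X = np.column_stack([np.ones(n), perceptual_integration,
                     part_based_efficiency, object_processing])
beta, *_ = np.linalg.lstsq(X, fpa, rcond=None)

# R^2: proportion of variance in FPA explained by the three predictors
residuals = fpa - X @ beta
r_squared = 1 - residuals.var() / fpa.var()
print(beta, r_squared)
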
Blais, C.; Fiset, D.; Côté, L.; Ledrou-Paquet, V.; Charbonneau, I.
Conducting online visual psychophysics experiments: A replication assessment of two face processing studies (Journal article)
In: Vision Research, vol. 233, 2025, ISSN: 0042-6989 (Publisher: Elsevier Ltd).
@article{blais_conducting_2025,
title = {Conducting online visual psychophysics experiments: A replication assessment of two face processing studies},
author = {C. Blais and D. Fiset and L. Côté and V. Ledrou-Paquet and I. Charbonneau},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105004807446&doi=10.1016%2fj.visres.2025.108617&partnerID=40&md5=771b056e57c4d7a34ff7c56ce39a4bd2},
doi = {10.1016/j.visres.2025.108617},
issn = {00426989 (ISSN)},
year = {2025},
date = {2025-01-01},
journal = {Vision Research},
volume = {233},
abstract = {In vision sciences, researchers rigorously control the testing environment and the physical properties of stimuli, making it challenging to conduct visual perception experiments online. However, online research offers key advantages, including access to larger and more diverse participant samples, helping to address the problem of underpowered studies and to enhance the generalizability of results. In face recognition research, increasing diversity is essential, especially considering evidence that cultural and geographical factors influence basic visual face processing. The present study tested a new online platform, Pack & Go from VPixx Technologies, that supports experiments written in MATLAB and Python. Two face recognition experiments based on a data-driven psychophysical method involving real-time stimulus manipulation and relying on functions from the Psychtoolbox were tested. In Experiment 1, the visual information used for face recognition was compared across four conditions that gradually reduced experimental control over the testing environment and stimulus properties. In Experiment 2, the association between face recognition abilities and information utilization was measured online and compared to lab-based results. In both experiments, results obtained in the lab and online were highly similar, demonstrating the potential of online research for vision science. © 2025 The Author(s)},
note = {Publisher: Elsevier Ltd},
keywords = {adult, article, cultural factor, experiment, Facial Recognition, female, geography, human, human experiment, Humans, information processing, laboratory, male, normal human, online system, Photic Stimulation, photostimulation, physiology, procedures, psychology, Psychophysics, recognition, stimulus response, vision, visual stimulation, Young Adult},
pubstate = {published},
tppubtype = {article}
}
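
As a rough, hedged illustration of the lab-versus-online comparison described above (not the authors' analysis): two 2-D maps of visual-information use obtained in different testing conditions can be compared with a pixel-wise Pearson correlation. The maps below are simulated placeholders.

import numpy as np

rng = np.random.default_rng(1)

# Simulated 64x64 "information use" maps for the lab and online conditions
lab_map = rng.normal(size=(64, 64))
online_map = 0.9 * lab_map + 0.1 * rng.normal(size=(64, 64))  # mostly similar by construction

# Pixel-wise Pearson correlation between the two flattened maps
r = np.corrcoef(lab_map.ravel(), online_map.ravel())[0, 1]
print(f"lab/online similarity: r = {r:.2f}")
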
Bérubé, A.; Pearson, J.; Blais, C.; Forget, H.
Stress and emotion recognition predict the relationship between a history of maltreatment and sensitive parenting behaviors: A moderated-moderation (Journal article)
In: Development and Psychopathology, vol. 37, no. 1, pp. 281–291, 2025, ISSN: 0954-5794 (Publisher: Cambridge University Press).
@article{berube_stress_2025,
title = {Stress and emotion recognition predict the relationship between a history of maltreatment and sensitive parenting behaviors: A moderated-moderation},
author = {A. Bérubé and J. Pearson and C. Blais and H. Forget},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85182715913&doi=10.1017%2fS095457942300158X&partnerID=40&md5=b3a9056662cf94740131bfd6fbe7352e},
doi = {10.1017/S095457942300158X},
issn = {09545794 (ISSN)},
year = {2025},
date = {2025-01-01},
journal = {Development and Psychopathology},
volume = {37},
number = {1},
pages = {281–291},
abstract = {Our study proposes to examine how stress and emotion recognition interact with a history of maltreatment to influence sensitive parenting behaviors. A sample of 58 mothers and their children aged between 2 and 5 years old were recruited. Parents' history of maltreatment was measured using the Child Trauma Questionnaire. An emotion recognition task was performed. Mothers identified the dominant emotion in morphed facial emotion expressions in children. Mothers and children interacted for 15 minutes. Salivary cortisol levels of mothers were collected before and after the interaction. Maternal sensitive behaviors were coded during the interaction using the Coding Interactive Behavior scheme. Results indicate that the severity of childhood maltreatment is related to less sensitive behaviors for mothers with average to good abilities in emotion recognition and lower to average increases in cortisol levels following an interaction with their children. For mothers with higher cortisol levels, there is no association between a history of maltreatment and sensitive behaviors, indicating that higher stress reactivity could act as a protective factor. Our study highlights the complex interaction between individual characteristics and environmental factors when it comes to parenting. These results argue for targeted interventions that address personal trauma. © 2024 The Author(s).},
note = {Publisher: Cambridge University Press},
keywords = {adult, Adult Survivors of Child Abuse, chemistry, Child, Child Abuse, child abuse survivor, child parent relation, childhood maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, female, human, Humans, Hydrocortisone, male, mental stress, metabolism, mother, mother child relation, Mother-Child Relations, Mothers, Parenting, physiology, Preschool, preschool child, Psychological, psychology, Saliva, sensitivity, Stress, stress reactivity},
pubstate = {published},
tppubtype = {article}
}
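
For readers unfamiliar with the term, a moderated moderation is typically tested as a regression including a three-way interaction. The sketch below is not the authors' analysis; it is a generic NumPy illustration with simulated data and hypothetical variable names (ctq = maltreatment severity, emo = emotion recognition, cort = cortisol reactivity, sens = sensitive behaviors).

import numpy as np

rng = np.random.default_rng(2)
n = 58  # sample size reported in the abstract

ctq = rng.normal(size=n)    # maltreatment severity (simulated)
emo = rng.normal(size=n)    # emotion recognition ability (simulated)
cort = rng.normal(size=n)   # cortisol reactivity (simulated)
sens = rng.normal(size=n)   # sensitive parenting behaviors (simulated)

# Design matrix: main effects, all two-way interactions, and the
# three-way interaction term that carries the moderated moderation
X = np.column_stack([
    np.ones(n), ctq, emo, cort,
    ctq * emo, ctq * cort, emo * cort,
    ctq * emo * cort,
])
beta, *_ = np.linalg.lstsq(X, sens, rcond=None)
print("three-way interaction coefficient:", beta[-1])
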
Charbonneau, I.; Duncan, J.; Blais, C.; Guérette, J.; Plouffe-Demers, M. -P.; Smith, F.; Fiset, D.
Facial expression categorization predominantly relies on mid-spatial frequencies (Journal article)
In: Vision Research, vol. 231, 2025, ISSN: 0042-6989 (Publisher: Elsevier Ltd).
@article{charbonneau_facial_2025,
title = {Facial expression categorization predominantly relies on mid-spatial frequencies},
author = {I. Charbonneau and J. Duncan and C. Blais and J. Guérette and M. -P. Plouffe-Demers and F. Smith and D. Fiset},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105003427898&doi=10.1016%2fj.visres.2025.108611&partnerID=40&md5=19b14eb2487f220c3e41cbce28fa5287},
doi = {10.1016/j.visres.2025.108611},
issn = {00426989 (ISSN)},
year = {2025},
date = {2025-01-01},
journal = {Vision Research},
volume = {231},
abstract = {Facial expressions are crucial in human communication. Recent decades have seen growing interest in understanding the role of spatial frequencies (SFs) in emotion perception in others. While some studies have suggested a preferential treatment of low versus high SFs, the optimal SFs for recognizing basic facial expressions remain elusive. This study, conducted on Western participants, addresses this gap using two complementary methods: a data-driven method (Exp. 1) without arbitrary SF cut-offs, and a more naturalistic method (Exp. 2) simulating variations in viewing distance. Results generally showed a preponderant role of low over high SFs, but particularly stress that facial expression categorization mostly relies on mid-range SF content (i.e. ∼6–13 cycles per face), often overlooked in previous studies. Optimal performance was observed at short to medium viewing distances (1.2–2.4 m), declining sharply with increased distance, precisely when mid-range SFs were no longer available. Additionally, our data suggest variations in SF tuning profiles across basic facial expressions and nuanced contributions from low and mid SFs in facial expression processing. Most importantly, it suggests that any method that removes mid-SF content has the downfall of offering an incomplete account of SFs diagnosticity for facial expression recognition. © 2025 The Authors},
note = {Publisher: Elsevier Ltd},
keywords = {adult, article, Bubbles, Classification, controlled study, emotion, Emotions, Facial Expression, facial expressions, Facial Recognition, female, human, Humans, male, physiology, Psychophysics, simulation, Spatial frequencies, Young Adult},
pubstate = {published},
tppubtype = {article}
}
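
As a hedged illustration of the mid-spatial-frequency band discussed above (roughly 6–13 cycles per face), the sketch below band-pass filters a square image in the Fourier domain with an ideal (hard-edged) filter. It is not the authors' pipeline, which used a data-driven method without arbitrary cut-offs and a viewing-distance simulation; the image and cut-offs here are placeholders.

import numpy as np

def bandpass_cycles_per_image(img, low=6, high=13):
    """Keep only spatial frequencies between `low` and `high` cycles per image
    using an ideal band-pass filter in the Fourier domain."""
    h, w = img.shape
    fy = np.fft.fftfreq(h) * h          # frequencies in cycles per image
    fx = np.fft.fftfreq(w) * w
    radius = np.sqrt(fy[:, None] ** 2 + fx[None, :] ** 2)
    mask = (radius >= low) & (radius <= high)
    spectrum = np.fft.fft2(img)
    return np.real(np.fft.ifft2(spectrum * mask))

# Placeholder "face" image; in practice this would be a face photograph
face = np.random.default_rng(3).normal(size=(256, 256))
mid_sf_face = bandpass_cycles_per_image(face, low=6, high=13)
print(mid_sf_face.shape)
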
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.
Predicting the Arousal and Valence Values of Emotional States Using Learned, Predesigned, and Deep Visual Features † (Journal article)
In: Sensors, vol. 24, no. 13, 2024, ISSN: 1424-8220 (Publisher: Multidisciplinary Digital Publishing Institute (MDPI)).
@article{joudeh_predicting_2024,
title = {Predicting the Arousal and Valence Values of Emotional States Using Learned, Predesigned, and Deep Visual Features †},
author = {I. O. Joudeh and A. -M. Cretu and S. Bouchard},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85198382238&doi=10.3390%2fs24134398&partnerID=40&md5=cefa8b2e2c044d02f99662af350007db},
doi = {10.3390/s24134398},
issn = {14248220 (ISSN)},
year = {2024},
date = {2024-01-01},
journal = {Sensors},
volume = {24},
number = {13},
abstract = {The cognitive state of a person can be categorized using the circumplex model of emotional states, a continuous model of two dimensions: arousal and valence. The purpose of this research is to select a machine learning model(s) to be integrated into a virtual reality (VR) system that runs cognitive remediation exercises for people with mental health disorders. As such, the prediction of emotional states is essential to customize treatments for those individuals. We exploit the Remote Collaborative and Affective Interactions (RECOLA) database to predict arousal and valence values using machine learning techniques. RECOLA includes audio, video, and physiological recordings of interactions between human participants. To allow learners to focus on the most relevant data, features are extracted from raw data. Such features can be predesigned, learned, or extracted implicitly using deep learners. Our previous work on video recordings focused on predesigned and learned visual features. In this paper, we extend our work onto deep visual features. Our deep visual features are extracted using the MobileNet-v2 convolutional neural network (CNN) that we previously trained on RECOLA’s video frames of full/half faces. As the final purpose of our work is to integrate our solution into a practical VR application using head-mounted displays, we experimented with half faces as a proof of concept. The extracted deep features were then used to predict arousal and valence values via optimizable ensemble regression. We also fused the extracted visual features with the predesigned visual features and predicted arousal and valence values using the combined feature set. In an attempt to enhance our prediction performance, we further fused the predictions of the optimizable ensemble model with the predictions of the MobileNet-v2 model. After decision fusion, we achieved a root mean squared error (RMSE) of 0.1140, a Pearson’s correlation coefficient (PCC) of 0.8000, and a concordance correlation coefficient (CCC) of 0.7868 on arousal predictions. We achieved an RMSE of 0.0790, a PCC of 0.7904, and a CCC of 0.7645 on valence predictions. © 2024 by the authors.},
note = {Publisher: Multidisciplinary Digital Publishing Institute (MDPI)},
keywords = {adult, Affective interaction, Arousal, artificial neural network, Cognitive state, Cognitive/emotional state, Collaborative interaction, computer, Convolutional neural networks, correlation coefficient, Deep learning, emotion, Emotional state, Emotions, female, Forecasting, Helmet mounted displays, human, Humans, Learning algorithms, Learning systems, Long short-term memory, Machine learning, Machine-learning, male, Mean square error, Neural networks, physiology, Regression, Root mean squared errors, Video recording, virtual reality, Visual feature, visual features},
pubstate = {published},
tppubtype = {article}
}
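
The evaluation metrics quoted above (RMSE, Pearson's correlation coefficient, and the concordance correlation coefficient) are standard and easy to reproduce, and the decision fusion step is described as combining two models' predictions. The sketch below is a generic NumPy illustration with simulated prediction vectors, not the authors' code.

import numpy as np

def rmse(y, yhat):
    return float(np.sqrt(np.mean((y - yhat) ** 2)))

def pcc(y, yhat):
    return float(np.corrcoef(y, yhat)[0, 1])

def ccc(y, yhat):
    """Concordance correlation coefficient (Lin, 1989)."""
    cov = np.cov(y, yhat, ddof=0)[0, 1]
    return float(2 * cov / (y.var() + yhat.var() + (y.mean() - yhat.mean()) ** 2))

rng = np.random.default_rng(4)
arousal = rng.uniform(-1, 1, size=500)                        # simulated ground truth
pred_ensemble = arousal + rng.normal(scale=0.15, size=500)    # simulated model 1 output
pred_cnn = arousal + rng.normal(scale=0.20, size=500)         # simulated model 2 output

# Simple decision fusion: average the two models' predictions
fused = (pred_ensemble + pred_cnn) / 2
print(rmse(arousal, fused), pcc(arousal, fused), ccc(arousal, fused))
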
Pétrin, R.; Bérubé, A.; St-Pierre, É.; Blais, C.
Maternal childhood emotional abuse increases cardiovascular responses to children’s emotional facial expressions (Journal article)
In: PLoS ONE, vol. 19, no. 5 May, 2024, ISSN: 1932-6203 (Publisher: Public Library of Science).
@article{petrin_maternal_2024,
title = {Maternal childhood emotional abuse increases cardiovascular responses to children’s emotional facial expressions},
author = {R. Pétrin and A. Bérubé and É. St-Pierre and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85192637581&doi=10.1371%2fjournal.pone.0302782&partnerID=40&md5=c464b30fe7cc5b7b0baaf865fdf1f6de},
doi = {10.1371/journal.pone.0302782},
issn = {19326203 (ISSN)},
year = {2024},
date = {2024-01-01},
journal = {PLoS ONE},
volume = {19},
number = {5 May},
abstract = {Parents with a history of childhood maltreatment may be more likely to respond inadequately to their child’s emotional cues, such as crying or screaming, due to previous exposure to prolonged stress. While studies have investigated parents’ physiological reactions to their children’s vocal expressions of emotions, less attention has been given to their responses when perceiving children’s facial expressions of emotions. The present study aimed to determine if viewing facial expressions of emotions in children induces cardiovascular changes in mothers (hypo- or hyper-arousal) and whether these differ as a function of childhood maltreatment. A total of 104 mothers took part in this study. Their experiences of childhood maltreatment were measured using the Childhood Trauma Questionnaire (CTQ). Participants’ electrocardiogram signals were recorded during a task in which they viewed a landscape video (baseline) and images of children’s faces expressing different intensities of emotion. Heart rate variability (HRV) was extracted from the recordings as an indicator of parasympathetic reactivity. Participants presented two profiles: one group of mothers had a decreased HRV when presented with images of children’s facial expressions of emotions, while the other group’s HRV increased. However, HRV change was not significantly different between the two groups. The interaction between HRV groups and the severity of maltreatment experienced was marginal. Results suggested that experiences of childhood emotional abuse were more common in mothers whose HRV increased during the task. Therefore, more severe childhood experiences of emotional abuse could be associated with mothers’ cardiovascular hyperreactivity. Maladaptive cardiovascular responses could have a ripple effect, influencing how mothers react to their children’s facial expressions of emotions. That reaction could affect the quality of their interaction with their child. Providing interventions that help parents regulate their physiological and behavioral responses to stress might be helpful, especially if they have experienced childhood maltreatment. © 2024 Public Library of Science. All rights reserved.},
note = {Publisher: Public Library of Science},
keywords = {adult, alcohol consumption, analysis of variance, article, blood pressure, cardiovascular response, Child, Child Abuse, Childhood, Childhood Trauma Questionnaire, demographics, electrocardiogram, Electrocardiography, emotion, Emotional Abuse, Emotions, Ethnicity, Facial Expression, female, Heart Rate, heart rate variability, human, human experiment, Humans, Likert scale, male, mother, mother child relation, Mother-Child Relations, Mothers, parasympathetic tone, physical activity, physiology, post hoc analysis, psychology, questionnaire, sexual abuse, Surveys and Questionnaires},
pubstate = {published},
tppubtype = {article}
}
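
Heart rate variability, used above as an index of parasympathetic reactivity, is derived from the inter-beat (R-R) intervals of the ECG. The abstract does not state which HRV index was extracted; the sketch below computes RMSSD, one common time-domain index, from simulated R-R intervals and is only illustrative.

import numpy as np

def rmssd(rr_ms):
    """Root mean square of successive differences of R-R intervals (in ms)."""
    diffs = np.diff(np.asarray(rr_ms, dtype=float))
    return float(np.sqrt(np.mean(diffs ** 2)))

# Simulated R-R intervals around 800 ms (about 75 bpm) with beat-to-beat variability
rng = np.random.default_rng(5)
rr_baseline = 800 + rng.normal(scale=40, size=300)
rr_task = 800 + rng.normal(scale=25, size=300)   # reduced variability during the task

print("baseline RMSSD:", rmssd(rr_baseline))
print("task RMSSD:", rmssd(rr_task))
print("HRV change (task - baseline):", rmssd(rr_task) - rmssd(rr_baseline))
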
Bérubé, A.; Turgeon, J.; Blais, C.; Fiset, D.
Emotion Recognition in Adults With a History of Childhood Maltreatment: A Systematic Review (Journal article)
In: Trauma, Violence, and Abuse, vol. 24, no. 1, pp. 278–294, 2023, ISSN: 1524-8380 (Publisher: SAGE Publications Ltd).
@article{berube_emotion_2023,
title = {Emotion Recognition in Adults With a History of Childhood Maltreatment: A Systematic Review},
author = {A. Bérubé and J. Turgeon and C. Blais and D. Fiset},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85109658115&doi=10.1177%2f15248380211029403&partnerID=40&md5=5654c858d5c0c84bfdd832a4c04dd1d5},
doi = {10.1177/15248380211029403},
issn = {15248380 (ISSN)},
year = {2023},
date = {2023-01-01},
journal = {Trauma, Violence, and Abuse},
volume = {24},
number = {1},
pages = {278–294},
abstract = {Child maltreatment has many well-documented lasting effects on children. Among its consequences, it affects children’s recognition of emotions. More and more studies are recognizing the lasting effect that a history of maltreatment can have on emotion recognition. A systematic literature review was conducted to better understand this relationship. The Preferred Reporting Items for Systematic Reviews and Meta-Analyses (PRISMA) protocol was used and four databases were searched, MEDLINE/PubMed, PsycINFO, EMBASE, and FRANCIS, using three cross-referenced key words: child abuse, emotion recognition, and adults. The search process identified 23 studies that met the inclusion criteria. The review highlights the wide variety of measures used to assess child maltreatment as well as the different protocols used to measure emotion recognition. The results indicate that adults with a history of childhood maltreatment show a differentiated reaction to happiness, anger, and fear. Happiness is less detected, whereas negative emotions are recognized more rapidly and at a lower intensity compared to adults not exposed to such traumatic events. Emotion recognition is also related to greater brain activation for the maltreated group. However, the results are less consistent for adults who also have a diagnosis of mental health problems. The systematic review found that maltreatment affects the perception of emotions expressed on both adult and child faces. However, more research is needed to better understand how a history of maltreatment is related to adults’ perception of children’s emotions. © The Author(s) 2021.},
note = {Publisher: SAGE Publications Ltd},
keywords = {adult, anger, Child, Child Abuse, childhood maltreatment, cycle of maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, Fear, human, Humans, perception, physiology, psychology, systematic review},
pubstate = {published},
tppubtype = {article}
}
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.; Guimond, S.
Prediction of Continuous Emotional Measures through Physiological and Visual Data † (Journal article)
In: Sensors, vol. 23, no. 12, 2023, ISSN: 1424-8220 (Publisher: MDPI).
@article{joudeh_prediction_2023,
title = {Prediction of Continuous Emotional Measures through Physiological and Visual Data †},
author = {I. O. Joudeh and A. -M. Cretu and S. Bouchard and S. Guimond},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85163943735&doi=10.3390%2fs23125613&partnerID=40&md5=5e970f0d8c5790b85d8d77a9f3f52a2d},
doi = {10.3390/s23125613},
issn = {14248220},
year = {2023},
date = {2023-01-01},
journal = {Sensors},
volume = {23},
number = {12},
abstract = {The affective state of a person can be measured using arousal and valence values. In this article, we contribute to the prediction of arousal and valence values from various data sources. Our goal is to later use such predictive models to adaptively adjust virtual reality (VR) environments and help facilitate cognitive remediation exercises for users with mental health disorders, such as schizophrenia, while avoiding discouragement. Building on our previous work on physiological, electrodermal activity (EDA) and electrocardiogram (ECG) recordings, we propose improving preprocessing and adding novel feature selection and decision fusion processes. We use video recordings as an additional data source for predicting affective states. We implement an innovative solution based on a combination of machine learning models alongside a series of preprocessing steps. We test our approach on RECOLA, a publicly available dataset. The best results are obtained with a concordance correlation coefficient (CCC) of 0.996 for arousal and 0.998 for valence using physiological data. Related work in the literature reported lower CCCs on the same data modality; thus, our approach outperforms the state-of-the-art approaches for RECOLA. Our study underscores the potential of using advanced machine learning techniques with diverse data sources to enhance the personalization of VR environments. © 2023 by the authors.},
note = {Publisher: MDPI},
keywords = {Affect recognition, Affective state, Arousal, Data-source, Deep learning, Electrocardiography, emotion, Emotion Recognition, Emotions, face recognition, Faces detection, Forecasting, human, Humans, Images processing, Learning systems, Machine learning, Machine-learning, mental disease, Mental Disorders, Physiological data, physiology, Signal-processing, Statistical tests, Video recording, Virtual-reality environment},
pubstate = {published},
tppubtype = {article}
}
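
The article above mentions adding a feature-selection step before prediction; the exact procedure is in the paper. The sketch below only illustrates a generic correlation-based feature-selection step of the kind such pipelines often include (keep the k features most correlated with the arousal/valence target). All names, sizes, and thresholds are hypothetical.

import numpy as np

def select_top_k_features(X, y, k=10):
    """Return indices of the k columns of X most correlated (by |Pearson r|) with y."""
    r = np.array([np.corrcoef(X[:, j], y)[0, 1] for j in range(X.shape[1])])
    return np.argsort(-np.abs(r))[:k]

rng = np.random.default_rng(6)
X = rng.normal(size=(200, 50))          # 200 samples, 50 candidate features (simulated)
y = X[:, 3] - 0.5 * X[:, 17] + rng.normal(scale=0.5, size=200)  # simulated valence target

selected = select_top_k_features(X, y, k=5)
print("selected feature indices:", selected)
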
Pauzé, A.; Plouffe-Demers, M. -P.; Fiset, D.; Saint-Amour, D.; Cyr, C.; Blais, C.
The relationship between orthorexia nervosa symptomatology and body image attitudes and distortion (Journal article)
In: Scientific Reports, vol. 11, no. 1, p. 13311, 2021, ISSN: 2045-2322 (Publisher: NLM (Medline)).
@article{pauze_relationship_2021,
title = {The relationship between orthorexia nervosa symptomatology and body image attitudes and distortion},
author = {A. Pauzé and M. -P. Plouffe-Demers and D. Fiset and D. Saint-Amour and C. Cyr and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85111785584&doi=10.1038%2fs41598-021-92569-2&partnerID=40&md5=3360e3096eab30cba8259423f69d1d87},
doi = {10.1038/s41598-021-92569-2},
issn = {20452322},
year = {2021},
date = {2021-01-01},
journal = {Scientific reports},
volume = {11},
number = {1},
pages = {13311},
abstract = {Orthorexia Nervosa (ON), a condition characterized by a fixation on healthy eating, still does not conform to any consensus concerning diagnostic criteria, notably in regard to a possible body image component. This study investigated the relationship between ON symptomatology, measured with the Eating Habit Questionnaire, and body image attitudes and body image distortion in a non-clinical sample. Explicit body image attitudes and distortion were measured using the Multidimensional Body-Self Relations Questionnaire. Implicit body image attitudes and distortion were assessed using the reverse correlation technique. Correlational analyses showed that ON is associated with both explicit and implicit attitudes and distortion toward body image. More precisely, multivariate analyses combining various body image components showed that ON is mostly associated with explicit overweight preoccupation, explicit investment in physical health and leading a healthy lifestyle, and implicit muscularity distortion. These findings suggest that ON symptomatology is positively associated with body image attitudes and distortion in a non-clinical sample. However, further studies should be conducted to better understand how ON symptomatology relates to body image, especially among clinical samples.},
note = {Publisher: NLM (Medline)},
keywords = {adult, Attitude, body image, Diet, eating disorder, Feeding and Eating Disorders, feeding behavior, female, Healthy, human, Humans, male, pathophysiology, physiology, procedures, questionnaire, Surveys and Questionnaires},
pubstate = {published},
tppubtype = {article}
}
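
The reverse correlation technique mentioned above derives an implicit representation by averaging the random noise fields associated with the observer's choices. The sketch below shows only the core computation (classification image = mean noise on chosen trials minus mean noise on rejected trials) with simulated data; it is a generic illustration, not the authors' stimuli or analysis.

import numpy as np

rng = np.random.default_rng(7)
n_trials, h, w = 500, 64, 64

# Random noise fields added to a base image on each trial (simulated)
noise = rng.normal(size=(n_trials, h, w))

# Simulated binary responses: 1 if the noisy stimulus was chosen as the
# better match to the observer's internal representation, else 0
responses = rng.integers(0, 2, size=n_trials)

# Classification image: chosen-trial noise minus rejected-trial noise
ci = noise[responses == 1].mean(axis=0) - noise[responses == 0].mean(axis=0)
print(ci.shape)
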
Bérubé, A.; Blais, C.; Fournier, A.; Turgeon, J.; Forget, H.; Coutu, S.; Dubeau, D.
Childhood maltreatment moderates the relationship between emotion recognition and maternal sensitive behaviors (Journal article)
In: Child Abuse and Neglect, vol. 102, 2020, ISSN: 0145-2134 (Publisher: Elsevier Ltd).
@article{berube_childhood_2020,
title = {Childhood maltreatment moderates the relationship between emotion recognition and maternal sensitive behaviors},
author = {A. Bérubé and C. Blais and A. Fournier and J. Turgeon and H. Forget and S. Coutu and D. Dubeau},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85079890346&doi=10.1016%2fj.chiabu.2020.104432&partnerID=40&md5=05add864de22734e614fe7a34d6d6f1a},
doi = {10.1016/j.chiabu.2020.104432},
issn = {01452134 (ISSN)},
year = {2020},
date = {2020-01-01},
journal = {Child Abuse and Neglect},
volume = {102},
abstract = {Background: Sensitivity is defined as parents' ability to perceive, react and respond to children's signals. Having a history of childhood maltreatment changes the way adults perceive visual emotions. These perceptual characteristics could have important consequences on how these parents respond to their children. Objective: The current study examines how a history of childhood maltreatment moderates the relationship between maternal emotion recognition in child faces and sensitive behaviors toward their child during free-play and a structured task. Participants and Setting: Participants included 58 mothers and their children aged between 2 and 5 years. Methods: Mothers were exposed to a set of photographs of child faces showing morphed images of the six basic emotional expressions. Mother-child interactions were then coded for sensitive behaviors. Mothers’ history of childhood maltreatment was assessed using the Childhood Trauma Questionnaire. Results: Maltreatment severity was related to poorer abilities in emotion recognition. However, the association between emotion recognition and sensitive behavior was moderated by history of childhood maltreatment. For mothers exposed to a severe form of childhood maltreatment, a better emotion recognition was related to less sensitive behaviors toward the child, both during free-play and the structured task. Conclusion: This relationship is unique to these mothers and is inconsistent with Ainsworth's definition of sensitivity. These results have important implications as they suggest mothers with a history of severe maltreatment would need tailored interventions which take into account their particular reactions to children's emotions. © 2020},
note = {Publisher: Elsevier Ltd},
keywords = {article, Child, Child Abuse, childhood maltreatment, Childhood Trauma Questionnaire, emotion, Emotion Recognition, Emotions, female, human, human experiment, Humans, male, Maternal Behavior, mother child relation, Mother-Child Relations, photography, physiology, Preschool, preschool child, psychology, Sensitive behaviors},
pubstate = {published},
tppubtype = {article}
}