

de Recherche et d’Innovation
en Cybersécurité et Société
Bérubé, A.; Pétrin, R.; Blais, C.
Parental depression moderates the relationship between childhood maltreatment and the recognition of children expressions of emotions Article de journal
Dans: Frontiers in Psychiatry, vol. 15, 2024, ISSN: 16640640 (ISSN), (Publisher: Frontiers Media SA).
Résumé | Liens | BibTeX | Étiquettes: adult, anger, article, Beck Depression Inventory, Child, Child Abuse, child parent relation, childhood maltreatment, Childhood Trauma Questionnaire, Depression, disease severity, disgust, educational status, emotion, Emotion Recognition, Facial Expression, female, happiness, human, income, major clinical study, male, parent-child relationship, parental sensitivity, preschool child, questionnaire, recognition, sadness
@article{berube_parental_2024,
  title     = {Parental depression moderates the relationship between childhood maltreatment and the recognition of children expressions of emotions},
  author    = {Bérubé, A. and Pétrin, R. and Blais, C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85196266525&doi=10.3389%2ffpsyt.2024.1374872&partnerID=40&md5=ce03a1c39e709fc0f2c773d4f82f3a10},
  doi       = {10.3389/fpsyt.2024.1374872},
  issn      = {1664-0640},
  year      = {2024},
  date      = {2024-01-01},
  journal   = {Frontiers in Psychiatry},
  volume    = {15},
  abstract  = {Background: Sensitivity plays a crucial role in parenting as it involves the ability to perceive and respond appropriately to children’s signals. Childhood maltreatment and depression can negatively impact adults’ ability to recognize emotions, but it is unclear which of these factors has a greater impact or how they interact. This knowledge is central to developing efficient, targeted interventions. This paper examines the interaction between parents’ depressive symptoms and childhood maltreatment and its influence on their ability to recognize the five basic emotions (happiness, anger, sadness, fear, and disgust) in children’s faces. Method: The sample consisted of 52 parents. Depressive symptoms were measured by the depression subscale of the Brief Symptom Inventory-18 (BSI-18), and maltreatment history was assessed by the Childhood Trauma Questionnaire (CTQ). Children’s emotional stimuli were morphed images created using The Child Affective Facial Expression (CAFE) database. Results: Our findings indicate that depressive symptoms moderate the relationship between parents’ history of childhood maltreatment and emotion recognition skills. Parents with higher depressive symptoms had lower emotion recognition accuracy when they had not experienced maltreatment. When childhood maltreatment was severe, emotion recognition skills were more consistent across all levels of depression. The relationship between depression and emotion recognition was primarily linked to recognizing sadness in children’s faces. Conclusion: These findings highlight how different experiences can affect parental abilities in emotion recognition and emphasize the need for interventions tailored to individual profiles to improve their effectiveness. Copyright © 2024 Bérubé, Pétrin and Blais.},
  note      = {Publisher: Frontiers Media SA},
  keywords  = {adult, anger, article, Beck Depression Inventory, Child, Child Abuse, child parent relation, childhood maltreatment, Childhood Trauma Questionnaire, Depression, disease severity, disgust, educational status, emotion, Emotion Recognition, Facial Expression, female, happiness, human, income, major clinical study, male, parent-child relationship, parental sensitivity, preschool child, questionnaire, recognition, sadness},
  pubstate  = {published},
  tppubtype = {article}
}
Turgeon, J.; Bérubé, A.; Blais, C.; Lemieux, A.; Fournier, A.
Recognition of children's emotional facial expressions among mothers reporting a history of childhood maltreatment Article de journal
Dans: PLoS ONE, vol. 15, no 12 December, 2020, ISSN: 19326203, (Publisher: Public Library of Science).
Résumé | Liens | BibTeX | Étiquettes: adult, Adverse Childhood Experiences, anger, article, Child, Child Abuse, Childhood Trauma Questionnaire, disgust, emotion, emotional neglect, Emotions, Facial Expression, Facial Recognition, Fear, female, happiness, human, Humans, major clinical study, male, mother, Mothers, parenthood, path analysis, physical abuse, Preschool, preschool child, psychology, recognition, Retrospective Studies, retrospective study, sadness, self report, sexual abuse, structural equation modeling, Young Adult
@article{turgeon_recognition_2020,
  title     = {Recognition of children's emotional facial expressions among mothers reporting a history of childhood maltreatment},
  author    = {Turgeon, J. and Bérubé, A. and Blais, C. and Lemieux, A. and Fournier, A.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85098916379&doi=10.1371%2fjournal.pone.0243083&partnerID=40&md5=2ef477465c0ad75d67b7f13d05f783b2},
  doi       = {10.1371/journal.pone.0243083},
  issn      = {1932-6203},
  year      = {2020},
  date      = {2020-01-01},
  journal   = {PLoS ONE},
  volume    = {15},
  number    = {12},
  abstract  = {Several studies have shown that child maltreatment is associated with both positive and negative effects on the recognition of facial emotions. Research has provided little evidence of a relation between maltreatment during childhood and young adults' ability to recognize facial displays of emotion in children, an essential skill for a sensitive parental response. In this study, we examined the consequences of different forms of maltreatment experienced in childhood on emotion recognition during parenthood. Participants included sixty-three mothers of children aged 2 to 5 years. Retrospective self-reports of childhood maltreatment were assessed using the short form of the Childhood Trauma Questionnaire (CTQ). Emotion recognition was measured using a morphed facial emotion identification task of all six basic emotions (anger, disgust, fear, happiness, sadness, and surprise). A Path Analysis via Structural Equation Model revealed that a history of physical abuse is related to a decreased ability to recognize both fear and sadness in children, whereas emotional abuse and sexual abuse are related to a decreased ability to recognize anger in children. In addition, emotional neglect is associated with an increased ability to recognize anger, whereas physical neglect is associated with less accuracy in recognizing happiness in children's facial emotional expressions. These findings have important clinical implications and expand current understanding of the consequences of childhood maltreatment on parents' ability to detect children's needs. © 2020 Turgeon et al.},
  note      = {Publisher: Public Library of Science},
  keywords  = {adult, Adverse Childhood Experiences, anger, article, Child, Child Abuse, Childhood Trauma Questionnaire, disgust, emotion, emotional neglect, Emotions, Facial Expression, Facial Recognition, Fear, female, happiness, human, Humans, major clinical study, male, mother, Mothers, parenthood, path analysis, physical abuse, Preschool, preschool child, psychology, recognition, Retrospective Studies, retrospective study, sadness, self report, sexual abuse, structural equation modeling, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Quintana, P.; Nolet, K.; Baus, O.; Bouchard, S.
The effect of exposure to fear-related body odorants on anxiety and interpersonal trust toward a virtual character Article de journal
Dans: Chemical Senses, vol. 44, no 9, p. 683–692, 2019, ISSN: 0379864X, (Publisher: Oxford University Press).
Résumé | Liens | BibTeX | Étiquettes: adult, Anxiety, article, body odor, body odorant, chemistry, controlled study, emotion, Emotions, exposure, Fear, female, fragrance, happiness, human, human experiment, Humans, male, metabolism, Middle Aged, odor, Odorants, pathology, Pleasure, priority journal, Sweat, sweating, Trust, unclassified drug, virtual reality, Young Adult
@article{quintana_effect_2019,
  title     = {The effect of exposure to fear-related body odorants on anxiety and interpersonal trust toward a virtual character},
  author    = {Quintana, P. and Nolet, K. and Baus, O. and Bouchard, S.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85074305238&doi=10.1093%2fchemse%2fbjz063&partnerID=40&md5=50ec319370e1688498abfa845c7ec343},
  doi       = {10.1093/chemse/bjz063},
  issn      = {0379-864X},
  year      = {2019},
  date      = {2019-01-01},
  journal   = {Chemical Senses},
  volume    = {44},
  number    = {9},
  pages     = {683--692},
  abstract  = {A growing body of literature documents how exposure to another person's fear-related body odorants can increase one's own anxiety and interfere with processing of social information, such as facial expression and impression formation. Building on these results, we aimed to 1) test the hypothesis that exposure to fear-related odorant would affect impression formation through fear contagion and 2) verify whether these effects can be observed in an ecologically valid (i.e., virtual) environment. We proposed that exposure to fear-related odorant would cause receivers to feel more anxious, which in turn would lead them to report less trust toward an unknown virtual character. This study had 2 distinct phases. First, we collected perspiration odorants from the armpits of 12 male senders (i.e., the source of the odorant) during the viewing of either fear or joy inducing film clips. In the second phase, 53 women receivers were exposed to either a fear, joy, or neutral odorant (i.e., between-subjects design) by breathing through a gauze attached to a disposable respirator mask while immersed in a virtual bar. As expected, receivers exposed to fear odorants felt significantly more stressed. Mediation analysis also revealed an indirect effect of exposure on trust through anxiety. More specifically, the more anxious the receiver felt, the less she trusted the virtual character. Our results show for the first time that the impact of exposure to fear-related body odorants on negative interpersonal impression formation is mediated by the anxiety induced in the receiver. © 2019 The Author(s) 2019. Published by Oxford University Press. All rights reserved.},
  note      = {Publisher: Oxford University Press},
  keywords  = {adult, Anxiety, article, body odor, body odorant, chemistry, controlled study, emotion, Emotions, exposure, Fear, female, fragrance, happiness, human, human experiment, Humans, male, metabolism, Middle Aged, odor, Odorants, pathology, Pleasure, priority journal, Sweat, sweating, Trust, unclassified drug, virtual reality, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Saumure, C.; Plouffe-Demers, M. -P.; Estéphan, A.; Fiset, D.; Blais, C.
The use of visual information in the recognition of posed and spontaneous facial expressions Article de journal
Dans: Journal of Vision, vol. 18, no 9, p. 1–15, 2018, ISSN: 15347362, (Publisher: Association for Research in Vision and Ophthalmology Inc.).
Résumé | Liens | BibTeX | Étiquettes: association, Cues, emotion, Emotions, Facial Expression, Facial Recognition, female, happiness, human, Humans, male, Pattern Recognition, physiology, Visual, Young Adult
@article{saumure_use_2018,
  title     = {The use of visual information in the recognition of posed and spontaneous facial expressions},
  author    = {Saumure, C. and Plouffe-Demers, M.-P. and Estéphan, A. and Fiset, D. and Blais, C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85054591286&doi=10.1167%2f18.9.21&partnerID=40&md5=9d2396b70438842c089a36f6a499f734},
  doi       = {10.1167/18.9.21},
  issn      = {1534-7362},
  year      = {2018},
  date      = {2018-01-01},
  journal   = {Journal of Vision},
  volume    = {18},
  number    = {9},
  pages     = {1--15},
  abstract  = {Recognizing facial expressions is crucial for the success of social interactions, and the visual processes underlying this ability have been the subject of many studies in the field of face perception. Nevertheless, the stimuli used in the majority of these studies consist of facial expressions that were produced on request rather than spontaneously induced. In the present study, we directly compared the visual strategies underlying the recognition of posed and spontaneous expressions of happiness, disgust, surprise, and sadness. We used the Bubbles method with pictures of the same individuals spontaneously expressing an emotion or posing with an expression on request. Two key findings were obtained: Visual strategies were less systematic with spontaneous than with posed expressions, suggesting a higher heterogeneity in the useful facial cues across identities; and with spontaneous expressions, the relative reliance on the mouth and eyes areas was more evenly distributed, contrasting with the higher reliance on the mouth compared to the eyes area observed with posed expressions. © 2018 The Authors.},
  note      = {Publisher: Association for Research in Vision and Ophthalmology Inc.},
  keywords  = {association, Cues, emotion, Emotions, Facial Expression, Facial Recognition, female, happiness, human, Humans, male, Pattern Recognition, physiology, Visual, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Roy, C.; Blais, C.; Fiset, D.; Rainville, P.; Gosselin, F.
Efficient information for recognizing pain in facial expressions Article de journal
Dans: European Journal of Pain (United Kingdom), vol. 19, no 6, p. 852–860, 2015, ISSN: 10903801 (ISSN).
Résumé | Liens | BibTeX | Étiquettes: anger, article, association, Classification, Cues, disgust, emotion, Emotions, Facial Expression, Fear, female, happiness, human, human experiment, Humans, male, nociception, normal human, Pain, pain assessment, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, random sample, reproducibility, Reproducibility of Results, sadness, statistical significance, Visual, visual information, visual stimulation
@article{roy_efficient_2015,
  title     = {Efficient information for recognizing pain in facial expressions},
  author    = {Roy, C. and Blais, C. and Fiset, D. and Rainville, P. and Gosselin, F.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84929122739&doi=10.1002%2fejp.676&partnerID=40&md5=027f6da7b6d5c98c86de6a07766fb83d},
  doi       = {10.1002/ejp.676},
  issn      = {1090-3801},
  year      = {2015},
  date      = {2015-01-01},
  journal   = {European Journal of Pain (United Kingdom)},
  volume    = {19},
  number    = {6},
  pages     = {852--860},
  abstract  = {Background The face as a visual stimulus is a reliable source of information for judging the pain experienced by others. Until now, most studies investigating the facial expression of pain have used a descriptive method (i.e. Facial Action Coding System). However, the facial features that are relevant for the observer in the identification of the expression of pain remain largely unknown despite the strong medical impact that misjudging pain can have on patients' well-being. Methods Here, we investigated this question by applying the Bubbles method. Fifty healthy volunteers were asked to categorize facial expressions (the six basic emotions, pain and neutrality) displayed in stimuli obtained from a previously validated set and presented for 500 ms each. To determine the critical areas of the face used in this categorization task, the faces were partly masked based on random sampling of regions of the stimuli at different spatial frequency ranges. Results Results show that accurate pain discrimination relies mostly on the frown lines and the mouth. Finally, an ideal observer analysis indicated that the use of the frown lines in human observers could not be attributed to the objective 'informativeness' of this area. Conclusions Based on a recent study suggesting that this area codes for the affective dimension of pain, we propose that the visual system has evolved to focus primarily on the facial cues that signal the aversiveness of pain, consistent with the social role of facial expressions in the communication of potential threats. © 2015 European Pain Federation-EFIC®.},
  keywords  = {anger, article, association, Classification, Cues, disgust, emotion, Emotions, Facial Expression, Fear, female, happiness, human, human experiment, Humans, male, nociception, normal human, Pain, pain assessment, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, random sample, reproducibility, Reproducibility of Results, sadness, statistical significance, Visual, visual information, visual stimulation},
  pubstate  = {published},
  tppubtype = {article}
}
Joyal, C. C.; Jacob, L.; Cigna, M. -H.; Guay, J. -P.; Renaud, P.
Virtual faces expressing emotions: An initial concomitant and construct validity study Article de journal
Dans: Frontiers in Human Neuroscience, vol. 8, no SEP, p. 1–6, 2014, ISSN: 16625161, (Publisher: Frontiers Media S. A.).
Résumé | Liens | BibTeX | Étiquettes: adult, anger, article, computer program, construct validity, corrugator supercilii muscle, disgust, Electromyography, emotion, emotionality, face muscle, Facial Expression, Fear, female, gaze, happiness, human, human experiment, male, Middle Aged, muscle contraction, normal human, positive feedback, sadness, surprise, task performance, virtual reality, Young Adult, zygomatic major muscle
@article{joyal_virtual_2014,
  title     = {Virtual faces expressing emotions: An initial concomitant and construct validity study},
  author    = {Joyal, C. C. and Jacob, L. and Cigna, M.-H. and Guay, J.-P. and Renaud, P.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84933679803&doi=10.3389%2ffnhum.2014.00787&partnerID=40&md5=c51b26765fb1e2152cede99adcd519b0},
  doi       = {10.3389/fnhum.2014.00787},
  issn      = {1662-5161},
  year      = {2014},
  date      = {2014-01-01},
  journal   = {Frontiers in Human Neuroscience},
  volume    = {8},
  number    = {SEP},
  pages     = {1--6},
  abstract  = {Objectives: The goal of this study was to initially assess concomitants and construct validity of a newly developed set of virtual faces expressing six fundamental emotions (happiness, surprise, anger, sadness, fear, and disgust). Recognition rates, facial electromyography (zygomatic major and corrugator supercilii muscles), and regional gaze fixation latencies (eyes and mouth regions) were compared in 41 adult volunteers (20 ♂, 21 ♀) during the presentation of video clips depicting real vs. virtual adults expressing emotions. Background: Facial expressions of emotions represent classic stimuli for the studyofsocial cognition. Developing virtual dynamic facial expressions ofemotions, however, would open-up possibilities, both for fundamental and clinical research. For instance, virtual faces allow real-time Human–Computer retroactions between physiological measures and the virtual agent. Results: Emotions expressed by each set of stimuli were similarly recognized, both by men and women. Accordingly, both sets of stimuli elicited similar activation of facial muscles and similar ocular fixation times in eye regions from man and woman participants. Conclusion: Further validation studies can be performed with these virtual faces among clinical populations known to present social cognition difficulties. Brain–Computer Interface studies with feedback–feedforward interactions based on facial emotion expressions can also be conducted with these stimuli. © 2014 Joyal, Jacob, Cigna, Guay and Renaud.},
  note      = {Publisher: Frontiers Media S. A.},
  keywords  = {adult, anger, article, computer program, construct validity, corrugator supercilii muscle, disgust, Electromyography, emotion, emotionality, face muscle, Facial Expression, Fear, female, gaze, happiness, human, human experiment, male, Middle Aged, muscle contraction, normal human, positive feedback, sadness, surprise, task performance, virtual reality, Young Adult, zygomatic major muscle},
  pubstate  = {published},
  tppubtype = {article}
}
Dennis, E.; Rouleau, J. -L.; Renaud, P.; Nolet, K.; Saumur, C.
A pilot development of virtual stimuli depicting affective dispositions for penile plethysmography assessment of sex offenders Article de journal
Dans: Canadian Journal of Human Sexuality, vol. 23, no 3, p. 200–208, 2014, ISSN: 11884517 (ISSN), (Publisher: University of Toronto Press Inc.).
Résumé | Liens | BibTeX | Étiquettes: adult, Affect, affective disposition, article, assessment, computer program, Facial Expression, Fear, female, happiness, heterosexuality, human, human experiment, male, normal human, penile blood flow, penile plethysmography, pilot study, Plethysmography, sadness, sex offenders, sexual arousal, Sexual Behavior, sexual crime, sexual orientation, undergraduate student, virtual reality, Virtual stimuli
@article{dennis_pilot_2014,
  title     = {A pilot development of virtual stimuli depicting affective dispositions for penile plethysmography assessment of sex offenders},
  author    = {Dennis, E. and Rouleau, J.-L. and Renaud, P. and Nolet, K. and Saumur, C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84938522255&doi=10.3138%2fcjhs.2529&partnerID=40&md5=43cf6631bb2cd619ca0d049ae3a3b093},
  doi       = {10.3138/cjhs.2529},
  issn      = {1188-4517},
  year      = {2014},
  date      = {2014-01-01},
  journal   = {Canadian Journal of Human Sexuality},
  volume    = {23},
  number    = {3},
  pages     = {200--208},
  abstract  = {There are concerns regarding the reliability, realism, and validity of stimulus materials used in the assessment of sexual interests among sex offenders. This article explores new stimulus materials for use with penile plethysmography (PPG) assessments. First, this paper presents a pilot study where undergraduate students rated virtual characters (male and female) on perceived age. In addition, the materials developed are unique in that they depict the characters exhibiting varying affective dispositions, including neutral, fearful, sad, joyful, and seductive. Participants in the first study were also asked to identify the affective disposition of the virtual characters, and results suggest that affective disposition was largely perceived as intended, especially in terms of identifying the general emotional valence of the affective dispositions (i.e., positive versus negative). In a second pilot study, we used the computer-generated images to measure sexual arousal responses in a group of non-deviant males recruited in the community. Responses measured through penile plethysmography suggest participants responded to the stimuli as expected, as the greatest amount of sexual arousal was recorded when participants were shown the adult female character. In addition, participants responded with significant arousal only when the adult female character was depicted as sexually open (joyful or seductive), rather than sexually closed or neutral. Results suggest these materials may discriminate sexual interests if applied within clinical forensic assessment of sex offenders. © 2014 by the Sex Information and Education Council of Canada.},
  note      = {Publisher: University of Toronto Press Inc.},
  keywords  = {adult, Affect, affective disposition, article, assessment, computer program, Facial Expression, Fear, female, happiness, heterosexuality, human, human experiment, male, normal human, penile blood flow, penile plethysmography, pilot study, Plethysmography, sadness, sex offenders, sexual arousal, Sexual Behavior, sexual crime, sexual orientation, undergraduate student, virtual reality, Virtual stimuli},
  pubstate  = {published},
  tppubtype = {article}
}
Tremblay, L.; Bouchard, S.; Chebbi, B.; Wei, L.; Monthuy-Blanc, J.; Boulanger, D.
The development of a haptic virtual reality environment to study body image and affect Article de journal
Dans: Annual Review of CyberTherapy and Telemedicine, vol. 11, p. 80–84, 2013, ISSN: 15548716, (Publisher: Virtual reality med institute).
Résumé | Liens | BibTeX | Étiquettes: Affect, Arousal, art, article, Biofeedback, body image, computer interface, Emotional communications, female, happiness, Haptic devices, Haptics, human, Human bodies, human experiment, Humans, male, methodology, Mood, motor performance, physiology, psychological aspect, psychology, Psychomotor Performance, Psychophysiology, sadness, Touch, User-Computer Interface, velocity, virtual reality, Virtual-reality environment, Young Adult
@article{tremblay_development_2013,
  title     = {The development of a haptic virtual reality environment to study body image and affect},
  author    = {Tremblay, L. and Bouchard, S. and Chebbi, B. and Wei, L. and Monthuy-Blanc, J. and Boulanger, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84894231106&partnerID=40&md5=79731f3a31e9e70fcf3bf8f5db1f7d7c},
  issn      = {1554-8716},
  year      = {2013},
  date      = {2013-01-01},
  journal   = {Annual Review of CyberTherapy and Telemedicine},
  volume    = {11},
  pages     = {80--84},
  abstract  = {We report the results of a preliminary study testing the effect of participants’ mood rating on visual motor performance using a haptic device to manipulate a cartoonish human body. Our results suggest that moods involving high arousal (e.g. happiness) produce larger movements whereas mood involving low arousal (e.g. sadness) produce slower speed of performance. Our results are used for the development of a new haptic virtual reality application that we briefly present here. This application is intended to create a more interactive and motivational environment to treat body image issues and for emotional communication. © 2013 Interactive Media Institute.},
  note      = {Publisher: Virtual reality med institute},
  keywords  = {Affect, Arousal, art, article, Biofeedback, body image, computer interface, Emotional communications, female, happiness, Haptic devices, Haptics, human, Human bodies, human experiment, Humans, male, methodology, Mood, motor performance, physiology, psychological aspect, psychology, Psychomotor Performance, Psychophysiology, sadness, Touch, User-Computer Interface, velocity, virtual reality, Virtual-reality environment, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}