

de Recherche et d’Innovation
en Cybersécurité et Société
Charbonneau, I.; Duncan, J.; Blais, C.; Guérette, J.; Plouffe-Demers, M. -P.; Smith, F.; Fiset, D.
Facial expression categorization predominantly relies on mid-spatial frequencies Article de journal
Dans: Vision Research, vol. 231, 2025, ISSN: 00426989 (ISSN), (Publisher: Elsevier Ltd).
Résumé | Liens | BibTeX | Étiquettes: adult, article, Bubbles, Classification, controlled study, emotion, Emotions, Facial Expression, facial expressions, Facial Recognition, female, human, Humans, male, physiology, Psychophysics, simulation, Spatial frequencies, Young Adult
@article{charbonneau_facial_2025,
  title     = {Facial expression categorization predominantly relies on mid-spatial frequencies},
  author    = {Charbonneau, I. and Duncan, J. and Blais, C. and Guérette, J. and Plouffe-Demers, M.-P. and Smith, F. and Fiset, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105003427898&doi=10.1016%2fj.visres.2025.108611&partnerID=40&md5=19b14eb2487f220c3e41cbce28fa5287},
  doi       = {10.1016/j.visres.2025.108611},
  issn      = {0042-6989},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {Vision Research},
  volume    = {231},
  abstract  = {Facial expressions are crucial in human communication. Recent decades have seen growing interest in understanding the role of spatial frequencies (SFs) in emotion perception in others. While some studies have suggested a preferential treatment of low versus high SFs, the optimal SFs for recognizing basic facial expressions remain elusive. This study, conducted on Western participants, addresses this gap using two complementary methods: a data-driven method (Exp. 1) without arbitrary SF cut-offs, and a more naturalistic method (Exp. 2) simulating variations in viewing distance. Results generally showed a preponderant role of low over high SFs, but particularly stress that facial expression categorization mostly relies on mid-range SF content (i.e. ∼6–13 cycles per face), often overlooked in previous studies. Optimal performance was observed at short to medium viewing distances (1.2–2.4 m), declining sharply with increased distance, precisely when mid-range SFs were no longer available. Additionally, our data suggest variations in SF tuning profiles across basic facial expressions and nuanced contributions from low and mid SFs in facial expression processing. Most importantly, it suggests that any method that removes mid-SF content has the downfall of offering an incomplete account of SFs diagnosticity for facial expression recognition. © 2025 The Authors},
  note      = {Publisher: Elsevier Ltd},
  keywords  = {adult, article, Bubbles, Classification, controlled study, emotion, Emotions, Facial Expression, facial expressions, Facial Recognition, female, human, Humans, male, physiology, Psychophysics, simulation, Spatial frequencies, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Blais, C.; Fiset, D.; Gingras, F.; Plouffe-Demers, M. -P.; Charbonneau, I.
Diversity in Visual Perception: How Cultural Variability in Face Processing Can Inform Policymakers Article de journal
Dans: Policy Insights from the Behavioral and Brain Sciences, vol. 11, no 2, p. 141–148, 2024, ISSN: 23727322 (ISSN), (Publisher: SAGE Publications Ltd).
Résumé | Liens | BibTeX | Étiquettes: diversity, Face processing, facial expressions, other-race effect, psychology, societal applications, Visual Perception
@article{blais_diversity_2024,
  title     = {Diversity in Visual Perception: How Cultural Variability in Face Processing Can Inform Policymakers},
  author    = {Blais, C. and Fiset, D. and Gingras, F. and Plouffe-Demers, M.-P. and Charbonneau, I.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85201022017&doi=10.1177%2f23727322241269039&partnerID=40&md5=a39ef8d37e4997448cc134104d69269d},
  doi       = {10.1177/23727322241269039},
  issn      = {2372-7322},
  year      = {2024},
  date      = {2024-01-01},
  journal   = {Policy Insights from the Behavioral and Brain Sciences},
  volume    = {11},
  number    = {2},
  pages     = {141--148},
  abstract  = {Psychology and behavioral sciences lack diversity in their participant samples. In visual perception, more specifically, common practice assumes that the processes studied are fundamental and universal. In contrast, cultural psychology has accumulated evidence of cultural variability in visual perception. In face processing, for instance, this cultural variability may sabotage intercultural relations. Policies aim to increase diversity in research, supporting cultural psychology, and to increase awareness among professional workforces, as well as the general population, concerning how cultural variability may influence their interpretation of another's behavior. © The Author(s) 2024.},
  note      = {Publisher: SAGE Publications Ltd},
  keywords  = {diversity, Face processing, facial expressions, other-race effect, psychology, societal applications, Visual Perception},
  pubstate  = {published},
  tppubtype = {article}
}
Plouffe-Demers, M. -P.; Saumure, C.; Fiset, D.; Cormier, S.; Blais, C.
Facial Expression of Pain: Sex Differences in the Discrimination of Varying Intensities Article de journal
Dans: Emotion, vol. 23, no 5, p. 1254–1266, 2022, ISSN: 15283542 (ISSN), (Publisher: American Psychological Association).
Résumé | Liens | BibTeX | Étiquettes: adult, article, controlled study, data-driven methods, effect size, Empathy, Facial Expression, facial expressions, female, human, human experiment, information processing, male, normal human, Pain, pain intensity, qualitative research, sample size, sex difference, sex differences, vision, visual acuity, visual information, Visual Perception
@article{plouffe-demers_facial_2022,
  title     = {Facial Expression of Pain: Sex Differences in the Discrimination of Varying Intensities},
  author    = {Plouffe-Demers, M.-P. and Saumure, C. and Fiset, D. and Cormier, S. and Blais, C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85138214204&doi=10.1037%2femo0001156&partnerID=40&md5=d5063c7ab05722c16694952ac5d53027},
  doi       = {10.1037/emo0001156},
  issn      = {1528-3542},
  year      = {2022},
  date      = {2022-01-01},
  journal   = {Emotion},
  volume    = {23},
  number    = {5},
  pages     = {1254--1266},
  abstract  = {It has been proposed that women are better than men at recognizing emotions and pain experienced by others. They have also been shown to be more sensitive to variations in pain expressions. The objective of the present study was to explore the perceptual basis of these sexual differences by comparing the visual information used by men and women to discriminate between different intensities of pain facial expressions. Using the data-driven Bubbles method, we were able to corroborate the woman advantage in the discrimination of pain intensities that did not appear to be explained by variations in empathic tendencies. In terms of visual strategies, our results do not indicate any qualitative differences in the facial regions used by men and women. However, they suggest that women rely on larger regions of the face that seems to completely mediate their advantage. This utilization of larger clusters could indicate either that women integrate simultaneously and more efficiently information coming from different areas of the face or that they are more flexible in the utilization of the information present in these clusters. Women would then opt for a more holistic or flexible processing of the facial information, while men would rely on a specific yet rigid integration strategy. © 2022 American Psychological Association},
  note      = {Publisher: American Psychological Association},
  keywords  = {adult, article, controlled study, data-driven methods, effect size, Empathy, Facial Expression, facial expressions, female, human, human experiment, information processing, male, normal human, Pain, pain intensity, qualitative research, sample size, sex difference, sex differences, vision, visual acuity, visual information, Visual Perception},
  pubstate  = {published},
  tppubtype = {article}
}
Blais, C.; Fiset, D.; Roy, C.; Régimbald, C. S.; Gosselin, F.
Eye fixation patterns for categorizing static and dynamic facial Expressions Article de journal
Dans: Emotion, vol. 17, no 7, p. 1107–1119, 2017, ISSN: 15283542 (ISSN), (Publisher: American Psychological Association Inc.).
Résumé | Liens | BibTeX | Étiquettes: adult, anatomy and histology, Bubbles, Dynamic, emotion, Emotions, Eye, eye fixation, Eye movements, eye tracking, Facial Expression, facial expressions, female, Fixation, human, Humans, male, Mouth, Ocular, Photic Stimulation, photostimulation, physiology, PsycINFO, stimulus, Visual strategies, Young Adult
@article{blais_eye_2017,
  title     = {Eye fixation patterns for categorizing static and dynamic facial Expressions},
  author    = {Blais, C. and Fiset, D. and Roy, C. and Régimbald, C. S. and Gosselin, F.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85016754655&doi=10.1037%2femo0000283&partnerID=40&md5=baecd8d6500d1447d48a399497611ccd},
  doi       = {10.1037/emo0000283},
  issn      = {1528-3542},
  year      = {2017},
  date      = {2017-01-01},
  journal   = {Emotion},
  volume    = {17},
  number    = {7},
  pages     = {1107--1119},
  abstract  = {Facial expressions of emotion are dynamic in nature, but most studies on the visual strategies underlying the recognition of facial emotions have used static stimuli. The present study directly compared the visual strategies underlying the recognition of static and dynamic facial expressions using eye tracking and the Bubbles technique. The results revealed different eye fixation patterns with the 2 kinds of stimuli, with fewer fixations on the eye and mouth area during the recognition of dynamic than static expressions. However, these differences in eye fixations were not accompanied by any systematic differences in the facial information that was actually processed to recognize the expressions. © 2017 American Psychological Association.},
  note      = {Publisher: American Psychological Association Inc.},
  keywords  = {adult, anatomy and histology, Bubbles, Dynamic, emotion, Emotions, Eye, eye fixation, Eye movements, eye tracking, Facial Expression, facial expressions, female, Fixation, human, Humans, male, Mouth, Ocular, Photic Stimulation, photostimulation, physiology, PsycINFO, stimulus, Visual strategies, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}