

de Recherche et d’Innovation
en Cybersécurité et Société
Côté, L.; Lamontagne, J.; Bellerose, A.; Blais, C.; Fiset, D.
The eyes are central to face detection: revisiting the foundations of face processing Article de journal
Dans: Vision Research, vol. 243, 2026, ISSN: 00426989 (ISSN).
Résumé | Liens | BibTeX | Étiquettes: adult, article, Black person, Bubbles, Categorization, Caucasian, Detection, emotion assessment, Faces, Facial Recognition, facies, female, human, human experiment, Image analysis, information processing, Information use, male, Noise, normal human, perception, Prosopagnosia, spatial frequency discrimination, task performance, visual discrimination, Young Adult
@article{cote_eyes_2026,
  title     = {The eyes are central to face detection: revisiting the foundations of face processing},
  author    = {Côté, L. and Lamontagne, J. and Bellerose, A. and Blais, C. and Fiset, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105030389147&doi=10.1016%2Fj.visres.2026.108785&partnerID=40&md5=752aa5d9923ac60539e36118ad41e1e6},
  doi       = {10.1016/j.visres.2026.108785},
  issn      = {0042-6989},
  year      = {2026},
  date      = {2026-01-01},
  journal   = {Vision Research},
  volume    = {243},
  abstract  = {Face detection feels effortless, yet it requires finely tuned computations to extract socially meaningful signals from the visual stream. Here, we used the Bubbles method to isolate the facial features and spatial frequency information that support face categorization. Across three experiments varying in task demands and visual context, the eye region consistently emerged as the most diagnostic source of information, particularly in high spatial frequencies. This finding held whether participants distinguished faces from noise, from non-face objects, or from real-world categories—suggesting that the eyes serve as an anchor point for categorization across contexts. Strikingly, this diagnostic profile mirrors that found in face identification tasks, implying that detection and recognition may rely on shared perceptual mechanisms rather than sequential, independent processes. This overlap sheds light on longstanding ambiguities in the prosopagnosia literature, indicating that detection impairments found in patients may stem from a broader failure to extract critical eye information. More broadly, our results invite a rethinking of the early stages of face processing, suggesting that detection already involves selective use of diagnostic facial features that supports recognition, emotional decoding, and social perception. © 2026 The Author(s).},
  keywords  = {adult, article, Black person, Bubbles, Categorization, Caucasian, Detection, emotion assessment, Faces, Facial Recognition, facies, female, human, human experiment, Image analysis, information processing, Information use, male, Noise, normal human, perception, Prosopagnosia, spatial frequency discrimination, task performance, visual discrimination, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Gingras, F.; Fiset, D.; Plouffe-Demers, M. -P.; Estéphan, A.; N’Guiamba, M.; Sun, D.; Zhang, Y.; Blais, C.
Cultural differences in spatial frequency tunings to faces do not generalize to visual scenes and object stimuli Article de journal
Dans: Psychonomic Bulletin and Review, vol. 33, no 1, 2026, ISSN: 10699384 (ISSN).
Résumé | Liens | BibTeX | Étiquettes: Adolescent, adult, Asian, Asian People, Canada, Caucasian, China, Cross-Cultural Comparison, Cross-cultural psychology, cultural factor, Depth Perception, Face processing, Facial Recognition, female, human, Humans, male, Object processing, Pattern Recognition, physiology, Scene processing, Space Perception, Spatial frequencies, Visual, visual pattern recognition, Visual Perception, White People, Young Adult
@article{gingras_cultural_2026,
  title         = {Cultural differences in spatial frequency tunings to faces do not generalize to visual scenes and object stimuli},
  author        = {Gingras, F. and Fiset, D. and Plouffe-Demers, M.-P. and Estéphan, A. and N’Guiamba, M. and Sun, D. and Zhang, Y. and Blais, C.},
  url           = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105025378146&doi=10.3758%2Fs13423-025-02832-0&partnerID=40&md5=43840b8cfa4c2df54e647f03a452f8e5},
  doi           = {10.3758/s13423-025-02832-0},
  issn          = {1069-9384},
  year          = {2026},
  date          = {2026-01-01},
  journal       = {Psychonomic Bulletin and Review},
  volume        = {33},
  number        = {1},
  abstract      = {Previous research has identified cultural differences in visual perception, where East Asians focus more on global object structure and display a larger breadth of attention compared with Westerners. East Asians rely on lower spatial frequencies (SFs) compared to Westerners for face recognition, which may be linked to this. Investigating whether such differences extend to other high-level stimulus categories would clarify if SF tuning differences reflect more general or face specific cognitive processes. The present study compared the SF tunings of Canadians and Chinese during object (Exp. 1; N = 50) and scene (Exp. 3; N = 47) categorization. In both experiments, results did not indicate a significant difference between groups. In Experiment 3 (N = 128), we conducted an online replication of Experiment 1 while measuring the SF tunings of the same participants during face perception. Again, no significant difference between the groups was found during object categorization, but the finding that East Asians rely on lower SF than Westerners was replicated. Together, these results suggest that unique mechanisms may underlie the cultural differences in face processing, though alternative explanations, such as the feature consistency of faces, could also account for these findings. © The Psychonomic Society, Inc. 2025.},
  internal-note = {NOTE(review): scraped abstract labels the scene task "Exp. 3" and later calls the online replication "Experiment 3" - likely an import error (scene task is presumably Exp. 2); verify against the published abstract before editing},
  keywords      = {Adolescent, adult, Asian, Asian People, Canada, Caucasian, China, Cross-Cultural Comparison, Cross-cultural psychology, cultural factor, Depth Perception, Face processing, Facial Recognition, female, human, Humans, male, Object processing, Pattern Recognition, physiology, Scene processing, Space Perception, Spatial frequencies, Visual, visual pattern recognition, Visual Perception, White People, Young Adult},
  pubstate      = {published},
  tppubtype     = {article}
}
Ledrou-Paquet, V.; Fiset, D.; Carré, M.; Guérette, J.; Blais, C.
The facial information underlying economic decision-making Article de journal
Dans: Perception, vol. 55, no 3, p. 243–265, 2026, ISSN: 03010066 (ISSN).
Résumé | Liens | BibTeX | Étiquettes: Bubbles, face evaluation, face perception, facial trustworthiness, Social Perception, trust game, trustworthiness
@article{ledrou-paquet_facial_2026,
  title     = {The facial information underlying economic decision-making},
  author    = {Ledrou-Paquet, V. and Fiset, D. and Carré, M. and Guérette, J. and Blais, C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105020458633&doi=10.1177%2F03010066251387848&partnerID=40&md5=7c1ead96fa13944073ed63bc952ca723},
  doi       = {10.1177/03010066251387848},
  issn      = {0301-0066},
  year      = {2026},
  date      = {2026-01-01},
  journal   = {Perception},
  volume    = {55},
  number    = {3},
  pages     = {243--265},
  abstract  = {Faces are rapidly and automatically assessed on multiple social dimensions, including trustworthiness. The high inter-rater agreement on this social judgment suggests a systematic association between facial appearance and perceived trustworthiness. The facial information used by observers during explicit trustworthiness judgments has been studied before. However, it remains unknown whether the same perceptual strategies are used during decisions that involve trusting another individual, without necessitating an explicit trustworthiness judgment. To explore this, 53 participants completed the Trust Game, an economic decision task, while facial information was randomly sampled using the Bubbles method. Our results show that economic decisions based on facial cues rely on similar visual information as that used during explicit trustworthiness judgments. We then manipulated facial features identified as diagnostic for trust to test their influence on perceived trustworthiness (Experiment 2) and on trust-related behaviors (Experiment 3). Across all experiments, subtle, targeted changes to facial features systematically shifted both impressions and monetary trust decisions. These findings demonstrate that the same perceptual strategies underlie explicit judgments and trust behaviors, highlighting the applied relevance of even minimal alterations in facial appearance. These findings should be replicated with real faces from diverse demographic backgrounds to confirm their generalizability. © The Author(s) 2025. This article is distributed under the terms of the Creative Commons Attribution-NonCommercial 4.0 License (https://creativecommons.org/licenses/by-nc/4.0/) which permits non-commercial use, reproduction and distribution of the work without further permission provided the original work is attributed as specified on the SAGE and Open Access page (https://us.sagepub.com/en-us/nam/open-access-at-sage).},
  keywords  = {Bubbles, face evaluation, face perception, facial trustworthiness, Social Perception, trust game, trustworthiness},
  pubstate  = {published},
  tppubtype = {article}
}
Luo, F.; Zhang, Y.; Liang, W.; Blais, C.; Plouffe-Demers, M.-P.; Fiset, D.; Sun, D.; Chen, B.
Stroke features in the Chinese character recognition Article de journal
Dans: Quarterly Journal of Experimental Psychology, 2025, ISSN: 17470218 (ISSN).
Résumé | Liens | BibTeX | Étiquettes: Bubbles technique, Chinese stroke recognition, delayed-segment paradigm, script-specific adaptations, visual features
@article{luo_stroke_2025,
  title         = {Stroke features in the {Chinese} character recognition},
  author        = {Luo, F. and Zhang, Y. and Liang, W. and Blais, C. and Plouffe-Demers, M.-P. and Fiset, D. and Sun, D. and Chen, B.},
  url           = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105024609336&doi=10.1177%2F17470218251357441&partnerID=40&md5=04053a28f9602f36a971eadd4f981cdb},
  doi           = {10.1177/17470218251357441},
  issn          = {1747-0218},
  year          = {2025},
  date          = {2025-01-01},
  journal       = {Quarterly Journal of Experimental Psychology},
  abstract      = {While line vertices, terminations, and midsegments are critical for Roman letter identification, the diagnostic features of Chinese character strokes remain unclear. This study examines how local stroke-level features and global line-relation mechanisms contribute to Chinese character recognition. In Experiment 1, we applied the Bubbles classification image technique to native Chinese readers to identify diagnostic stroke features. Results revealed four key features: horizontal hooks, dots, vertical turnings, and raises. These features, while analogous to terminations in alphabetic systems, reflect unique dynamics of Chinese stroke production, marking stroke origins and terminations. Experiment 2 employed a delayed-segment paradigm to assess functional significance of these features. Greater degradation of vertices and midsegments significantly prolonged reaction times, and removal of stroke-based terminations (e.g., hooks) impaired recognition accuracy. Together, these findings support a two-tiered hierarchy in Chinese character recognition: stroke-specific terminals enable fine-grained feature discrimination, while line-relation features support global structural integration. The results affirm script-general principles (midsegments and vertices as perceptual anchors) and highlight language-specific adaptations, where stroke terminations function as dynamic positional cues. © Experimental Psychology Society 2025},
  internal-note = {NOTE(review): source had author as "M. -P. Plouffe Demers"; normalized to the hyphenated surname "Plouffe-Demers" used by every other entry in this file - confirm against the publisher record},
  keywords      = {Bubbles technique, Chinese stroke recognition, delayed-segment paradigm, script-specific adaptations, visual features},
  pubstate      = {published},
  tppubtype     = {article}
}
Charbonneau, I.; Duncan, J.; Blais, C.; Guérette, J.; Plouffe-Demers, M. -P.; Smith, F.; Fiset, D.
Facial expression categorization predominantly relies on mid-spatial frequencies Article de journal
Dans: Vision Research, vol. 231, 2025, ISSN: 00426989 (ISSN), (Publisher: Elsevier Ltd).
Résumé | Liens | BibTeX | Étiquettes: adult, article, Bubbles, Classification, controlled study, emotion, Emotions, Facial Expression, facial expressions, Facial Recognition, female, human, Humans, male, physiology, Psychophysics, simulation, Spatial frequencies, Young Adult
@article{charbonneau_facial_2025,
  title     = {Facial expression categorization predominantly relies on mid-spatial frequencies},
  author    = {Charbonneau, I. and Duncan, J. and Blais, C. and Guérette, J. and Plouffe-Demers, M.-P. and Smith, F. and Fiset, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105003427898&doi=10.1016%2Fj.visres.2025.108611&partnerID=40&md5=508d315d8092a9142c2d82f1b774cfdb},
  doi       = {10.1016/j.visres.2025.108611},
  issn      = {0042-6989},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {Vision Research},
  volume    = {231},
  publisher = {Elsevier Ltd},
  abstract  = {Facial expressions are crucial in human communication. Recent decades have seen growing interest in understanding the role of spatial frequencies (SFs) in emotion perception in others. While some studies have suggested a preferential treatment of low versus high SFs, the optimal SFs for recognizing basic facial expressions remain elusive. This study, conducted on Western participants, addresses this gap using two complementary methods: a data-driven method (Exp. 1) without arbitrary SF cut-offs, and a more naturalistic method (Exp. 2) simulating variations in viewing distance. Results generally showed a preponderant role of low over high SFs, but particularly stress that facial expression categorization mostly relies on mid-range SF content (i.e. ∼6–13 cycles per face), often overlooked in previous studies. Optimal performance was observed at short to medium viewing distances (1.2–2.4 m), declining sharply with increased distance, precisely when mid-range SFs were no longer available. Additionally, our data suggest variations in SF tuning profiles across basic facial expressions and nuanced contributions from low and mid SFs in facial expression processing. Most importantly, it suggests that any method that removes mid-SF content has the downfall of offering an incomplete account of SFs diagnosticity for facial expression recognition. © 2025 The Authors},
  keywords  = {adult, article, Bubbles, Classification, controlled study, emotion, Emotions, Facial Expression, facial expressions, Facial Recognition, female, human, Humans, male, physiology, Psychophysics, simulation, Spatial frequencies, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Bérubé, A.; Pétrin, R.; Boudreault, M.; Marcotte-Beaumier, G.; Blais, C.
Childhood maltreatment influences parental mimicry of children's emotional facial expressions Article de journal
Dans: Child Abuse and Neglect, vol. 170, 2025, ISSN: 01452134 (ISSN).
Résumé | Liens | BibTeX | Étiquettes: Adolescent, adult, Adult Survivors of Child Abuse, anger, article, Child, Child Abuse, child abuse survivor, child parent relation, childhood maltreatment, Childhood Trauma Questionnaire, controlled study, Diagnosis, disgust, emotion, Emotional facial expressions, emotional neglect, Emotions, Facial Expression, female, human, Humans, major clinical study, male, Mimicry, neglect, parent, Parent-Child Relations, Parenting, Parents, path analysis, physical abuse, psychological functioning, psychology, questionnaire, sadness, sexual abuse, social bonding, Surveys and Questionnaires, Young Adult
@article{berube_childhood_2025,
  title     = {Childhood maltreatment influences parental mimicry of children's emotional facial expressions},
  author    = {Bérubé, A. and Pétrin, R. and Boudreault, M. and Marcotte-Beaumier, G. and Blais, C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105020986193&doi=10.1016%2Fj.chiabu.2025.107787&partnerID=40&md5=18593b82f701fc76ad054419d48dfc69},
  doi       = {10.1016/j.chiabu.2025.107787},
  issn      = {0145-2134},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {Child Abuse and Neglect},
  volume    = {170},
  abstract  = {Background: Childhood maltreatment can disrupt socio-emotional functioning, potentially influencing how parents respond to children's emotional facial expressions. Mimicry, an automatic reaction to others' facial expressions, is a critical mechanism for social bonding and affiliation in parent-child relationships. However, the effects of childhood maltreatment on parental mimicry remain underexplored. Objective: This study examined the relationship between different forms of childhood maltreatment and parents' mimicry of children's emotional facial expressions. Participants and setting: Fifty-seven parents participated in an emotion recognition task conducted either at a local community organization or at the university laboratory. Methods: Parents' facial reactions were recorded and analyzed using FaceReader software to detect mimicry. The Childhood Trauma Questionnaire (CTQ) assessed parental history of maltreatment. A path analysis model was conducted to evaluate the associations between forms of childhood maltreatment and parental mimicry. Results: A history of physical abuse predicted increased expressions of anger, while physical neglect was linked to reduced anger but heightened disgust. Emotional and sexual abuse were associated with diminished mimicry of sadness, whereas emotional neglect predicted enhanced sadness mimicry. Conclusions: Findings suggest that childhood maltreatment alters parents' facial reactions to children's emotional facial expressions, potentially impacting parental sensitivity. © 2025},
  keywords  = {Adolescent, adult, Adult Survivors of Child Abuse, anger, article, Child, Child Abuse, child abuse survivor, child parent relation, childhood maltreatment, Childhood Trauma Questionnaire, controlled study, Diagnosis, disgust, emotion, Emotional facial expressions, emotional neglect, Emotions, Facial Expression, female, human, Humans, major clinical study, male, Mimicry, neglect, parent, Parent-Child Relations, Parenting, Parents, path analysis, physical abuse, psychological functioning, psychology, questionnaire, sadness, sexual abuse, social bonding, Surveys and Questionnaires, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Richer, A.; Gingras, F.; Plouffe-Demers, M. -P.; Fiset, D.; Blais, C.
Is It Pain, Anger, Disgust, or Sadness? Individual Differences in Expectations of Pain Facial Expressions Article de journal
Dans: Emotion, vol. 25, no 7, p. 1750–1763, 2025, ISSN: 15283542 (ISSN), (Publisher: American Psychological Association).
Résumé | Liens | BibTeX | Étiquettes: facial expressions confusion, Individual differences, mental representation, pain facial expression, Reverse correlation
@article{richer_is_2025,
  title         = {Is It Pain, Anger, Disgust, or Sadness? Individual Differences in Expectations of Pain Facial Expressions},
  author        = {Richer, A. and Gingras, F. and Plouffe-Demers, M.-P. and Fiset, D. and Blais, C.},
  url           = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105002303060&doi=10.1037%2femo0001516&partnerID=40&md5=3f1a8aa2bb0a38679b6fe7d354b216a2},
  doi           = {10.1037/emo0001516},
  issn          = {1528-3542},
  year          = {2025},
  date          = {2025-01-01},
  journal       = {Emotion},
  volume        = {25},
  number        = {7},
  pages         = {1750--1763},
  publisher     = {American Psychological Association},
  abstract      = {Humans rely on facial expressions to assess others’ affective states. However, pain facial expressions are poorly recognized and are often confused with other negative affective states, such as anger, disgust, sadness, and fear. Previous research has shown that individuals’ expectations about the appearance of pain facial expressions are not optimal and do not perfectly reflect the facial features typically observed in individuals expressing pain. In the present study, we verified if expectations about pain facial expressions are also suboptimal by overlapping with other affective states. We relied on two published data sets (data collected between 2017 and 2020) containing images representing the expectations of the appearance of pain facial expressions according to 162 White participants. We then asked an independent group of White participants (N = 60, 30 women},
  internal-note = {NOTE(review): abstract is truncated mid-sentence ("N = 60, 30 women") - likely cut at a special character during import; restore from the publisher record},
  keywords      = {facial expressions confusion, Individual differences, mental representation, pain facial expression, Reverse correlation},
  pubstate      = {published},
  tppubtype     = {article}
}
Bérubé, A.; Pearson, J.; Blais, C.; Forget, H.
Stress and emotion recognition predict the relationship between a history of maltreatment and sensitive parenting behaviors: A moderated-moderation Article de journal
Dans: Development and Psychopathology, vol. 37, no 1, p. 281–291, 2025, ISSN: 09545794 (ISSN), (Publisher: Cambridge University Press).
Résumé | Liens | BibTeX | Étiquettes: adult, Adult Survivors of Child Abuse, chemistry, Child, Child Abuse, child abuse survivor, child parent relation, childhood maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, female, human, Humans, Hydrocortisone, male, mental stress, metabolism, mother, mother child relation, Mother-Child Relations, Mothers, Parenting, physiology, Preschool, preschool child, Psychological, psychology, Saliva, sensitivity, Stress, stress reactivity
@article{berube_stress_2025,
  title     = {Stress and emotion recognition predict the relationship between a history of maltreatment and sensitive parenting behaviors: A moderated-moderation},
  author    = {Bérubé, A. and Pearson, J. and Blais, C. and Forget, H.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85182715913&doi=10.1017%2fS095457942300158X&partnerID=40&md5=b3a9056662cf94740131bfd6fbe7352e},
  doi       = {10.1017/S095457942300158X},
  issn      = {0954-5794},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {Development and Psychopathology},
  volume    = {37},
  number    = {1},
  pages     = {281--291},
  publisher = {Cambridge University Press},
  abstract  = {Our study proposes to examine how stress and emotion recognition interact with a history of maltreatment to influence sensitive parenting behaviors. A sample of 58 mothers and their children aged between 2 and 5 years old were recruited. Parents' history of maltreatment was measured using the Child Trauma Questionnaire. An emotion recognition task was performed. Mothers identified the dominant emotion in morphed facial emotion expressions in children. Mothers and children interacted for 15 minutes. Salivary cortisol levels of mothers were collected before and after the interaction. Maternal sensitive behaviors were coded during the interaction using the Coding Interactive Behavior scheme. Results indicate that the severity of childhood maltreatment is related to less sensitive behaviors for mothers with average to good abilities in emotion recognition and lower to average increases in cortisol levels following an interaction with their children. For mothers with higher cortisol levels, there is no association between a history of maltreatment and sensitive behaviors, indicating that higher stress reactivity could act as a protective factor. Our study highlights the complex interaction between individual characteristics and environmental factors when it comes to parenting. These results argue for targeted interventions that address personal trauma. © 2024 The Author(s).},
  keywords  = {adult, Adult Survivors of Child Abuse, chemistry, Child, Child Abuse, child abuse survivor, child parent relation, childhood maltreatment, emotion, Emotion Recognition, Emotions, Facial Expression, female, human, Humans, Hydrocortisone, male, mental stress, metabolism, mother, mother child relation, Mother-Child Relations, Mothers, Parenting, physiology, Preschool, preschool child, Psychological, psychology, Saliva, sensitivity, Stress, stress reactivity},
  pubstate  = {published},
  tppubtype = {article}
}
Blais, C.; Fiset, D.; Côté, L.; Ledrou-Paquet, V.; Charbonneau, I.
Conducting online visual psychophysics experiments: A replication assessment of two face processing studies Article de journal
Dans: Vision Research, vol. 233, 2025, ISSN: 00426989 (ISSN), (Publisher: Elsevier Ltd).
Résumé | Liens | BibTeX | Étiquettes: adult, article, cultural factor, experiment, Facial Recognition, female, geography, human, human experiment, Humans, information processing, laboratory, male, normal human, online system, Photic Stimulation, photostimulation, physiology, procedures, psychology, Psychophysics, recognition, stimulus response, vision, visual stimulation, Young Adult
@article{blais_conducting_2025,
  title     = {Conducting online visual psychophysics experiments: A replication assessment of two face processing studies},
  author    = {Blais, C. and Fiset, D. and Côté, L. and Ledrou-Paquet, V. and Charbonneau, I.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105004807446&doi=10.1016%2fj.visres.2025.108617&partnerID=40&md5=771b056e57c4d7a34ff7c56ce39a4bd2},
  doi       = {10.1016/j.visres.2025.108617},
  issn      = {0042-6989},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {Vision Research},
  volume    = {233},
  publisher = {Elsevier Ltd},
  abstract  = {In vision sciences, researchers rigorously control the testing environment and the physical properties of stimuli, making it challenging to conduct visual perception experiments online. However, online research offers key advantages, including access to larger and more diverse participant samples, helping to address the problem of underpowered studies and to enhance the generalizability of results. In face recognition research, increasing diversity is essential, especially considering evidence that cultural and geographical factors influence basic visual face processing. The present study tested a new online platform, Pack \& Go from VPixx Technologies, that supports experiments written in MATLAB and Python. Two face recognition experiments based on a data-driven psychophysical method involving real-time stimulus manipulation and relying on functions from the Psychtoolbox were tested. In Experiment 1, the visual information used for face recognition was compared across four conditions that gradually reduced experimental control over the testing environment and stimulus properties. In Experiment 2, the association between face recognition abilities and information utilization was measured online and compared to lab-based results. In both experiments, results obtained in the lab and online were highly similar, demonstrating the potential of online research for vision science. © 2025 The Author(s)},
  keywords  = {adult, article, cultural factor, experiment, Facial Recognition, female, geography, human, human experiment, Humans, information processing, laboratory, male, normal human, online system, Photic Stimulation, photostimulation, physiology, procedures, psychology, Psychophysics, recognition, stimulus response, vision, visual stimulation, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Audette, P. -L.; Côté, L.; Blais, C.; Duncan, J.; Gingras, F.; Fiset, D.
Part-based processing, but not holistic processing, predicts individual differences in face recognition abilities Article de journal
Dans: Cognition, vol. 256, 2025, ISSN: 00100277 (ISSN), (Publisher: Elsevier B.V.).
Résumé | Liens | BibTeX | Étiquettes: Adolescent, adult, article, face perception, face recognition, Facial Recognition, female, human, human experiment, Humans, Individual differences, Individuality, Integration efficiency, male, multiple linear regression analysis, physiology, psychology, Psychophysics, recognition, Young Adult
@article{audette_part-based_2025,
  title     = {Part-based processing, but not holistic processing, predicts individual differences in face recognition abilities},
  author    = {Audette, P.-L. and Côté, L. and Blais, C. and Duncan, J. and Gingras, F. and Fiset, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85213575434&doi=10.1016%2fj.cognition.2024.106057&partnerID=40&md5=135d2ba1bdf18648b57db0d3a93d0628},
  doi       = {10.1016/j.cognition.2024.106057},
  issn      = {0010-0277},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {Cognition},
  volume    = {256},
  publisher = {Elsevier B.V.},
  abstract  = {This study aimed to assess the roles of part-based and holistic processing for face processing ability (FPA). A psychophysical paradigm in which the efficiency at recognizing isolated or combined facial parts was used (N = 64), and holistic processing was defined as the perceptual integration from multiple parts. FPA and object processing ability were measured using a battery of tasks. A multiple linear regression including three predictors, namely perceptual integration, part-based efficiency, and object processing, explained 40 \% of the variance in FPA. Most importantly, our results reveal a strong predictive relationship between part-based efficiency and FPA, a small predictive relationship between object processing ability and FPA, and no predictive relationship between perceptual integration and FPA. This result was obtained despite considerable variance in perceptual integration skills–with some participants exhibiting a highly efficient integration. These results indicate that part-based processing plays a pivotal role in FPA, whereas holistic processing does not. © 2024 The Authors},
  keywords  = {Adolescent, adult, article, face perception, face recognition, Facial Recognition, female, human, human experiment, Humans, Individual differences, Individuality, Integration efficiency, male, multiple linear regression analysis, physiology, psychology, Psychophysics, recognition, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}



