

Plouffe-Demers, M. -P.; Saumure, C.; Fiset, D.; Cormier, S.; Blais, C.
Facial Expression of Pain: Sex Differences in the Discrimination of Varying Intensities Journal article
In: Emotion, vol. 23, no. 5, pp. 1254–1266, 2022, ISSN: 1528-3542 (Publisher: American Psychological Association).
@article{plouffe-demers_facial_2022,
title = {Facial Expression of Pain: Sex Differences in the Discrimination of Varying Intensities},
author = {M. -P. Plouffe-Demers and C. Saumure and D. Fiset and S. Cormier and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85138214204&doi=10.1037%2femo0001156&partnerID=40&md5=d5063c7ab05722c16694952ac5d53027},
doi = {10.1037/emo0001156},
issn = {15283542 (ISSN)},
year = {2022},
date = {2022-01-01},
journal = {Emotion},
volume = {23},
number = {5},
pages = {1254–1266},
abstract = {It has been proposed that women are better than men at recognizing emotions and pain experienced by others. They have also been shown to be more sensitive to variations in pain expressions. The objective of the present study was to explore the perceptual basis of these sexual differences by comparing the visual information used by men and women to discriminate between different intensities of pain facial expressions. Using the data-driven Bubbles method, we were able to corroborate the woman advantage in the discrimination of pain intensities that did not appear to be explained by variations in empathic tendencies. In terms of visual strategies, our results do not indicate any qualitative differences in the facial regions used by men and women. However, they suggest that women rely on larger regions of the face that seems to completely mediate their advantage. This utilization of larger clusters could indicate either that women integrate simultaneously and more efficiently information coming from different areas of the face or that they are more flexible in the utilization of the information present in these clusters. Women would then opt for a more holistic or flexible processing of the facial information, while men would rely on a specific yet rigid integration strategy. © 2022 American Psychological Association},
note = {Publisher: American Psychological Association},
keywords = {adult, article, controlled study, data-driven methods, effect size, Empathy, Facial Expression, facial expressions, female, human, human experiment, information processing, male, normal human, Pain, pain intensity, qualitative research, sample size, sex difference, sex differences, vision, visual acuity, visual information, Visual Perception},
pubstate = {published},
tppubtype = {article}
}
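
The abstract above relies on the data-driven Bubbles method, in which the face is revealed only through randomly placed Gaussian apertures and accuracy is later related to which regions happened to be visible. The short Python sketch below illustrates that sampling step only; the aperture count and width are illustrative assumptions, not the parameters used in the study.

import numpy as np

def bubbles_mask(height, width, n_bubbles=20, sigma=12.0, rng=None):
    """Mask in [0, 1] built from randomly placed Gaussian apertures."""
    rng = np.random.default_rng() if rng is None else rng
    ys, xs = np.mgrid[0:height, 0:width]
    mask = np.zeros((height, width))
    for _ in range(n_bubbles):
        cy, cx = rng.integers(0, height), rng.integers(0, width)
        mask += np.exp(-((ys - cy) ** 2 + (xs - cx) ** 2) / (2 * sigma ** 2))
    return np.clip(mask, 0.0, 1.0)

# Usage: reveal a grayscale face through the apertures, with the mean
# luminance filling the hidden regions.
# face = ...                      # 2-D array, e.g. 256 x 256
# mask = bubbles_mask(*face.shape)
# stimulus = mask * face + (1.0 - mask) * face.mean()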
Charbonneau, I.; Robinson, K.; Blais, C.; Fiset, D.
Implicit race attitudes modulate visual information extraction for trustworthiness judgments Journal article
In: PLoS ONE, vol. 15, no. 9, 2020, ISSN: 1932-6203 (Publisher: Public Library of Science).
@article{charbonneau_implicit_2020,
title = {Implicit race attitudes modulate visual information extraction for trustworthiness judgments},
author = {I. Charbonneau and K. Robinson and C. Blais and D. Fiset},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85091622106&doi=10.1371%2fjournal.pone.0239305&partnerID=40&md5=18ca2332affc9cb41d17afc8c450b0b4},
doi = {10.1371/journal.pone.0239305},
issn = {19326203},
year = {2020},
date = {2020-01-01},
journal = {PLoS ONE},
volume = {15},
number = {9 September},
abstract = {Black people are still considered to be one of the most stigmatized groups and have to face multiple prejudices that undermine their well-being. Assumptions and beliefs about other racial groups are quite pervasive and have been shown to impact basic social tasks such as face processing. For example, individuals with high racial prejudice conceptualize other-race faces as less trustworthy and more criminal. However, it is unknown if implicit racial bias could modulate even low-level perceptual mechanisms such as spatial frequency (SF) extraction when judging the level of trustworthiness of other-race faces. The present study showed that although similar facial features are used to judge the trustworthiness of White and Black faces, own-race faces are processed in lower SF (i.e. coarse information such as the contour of the face and blurred shapes as opposed to high SF representing fine-grained information such as eyelashes or fine wrinkles). This pattern was modulated by implicit race biases: higher implicit biases are associated with a significantly higher reliance on low SF with White than with Black faces. Copyright: © 2020 Charbonneau et al. This is an open access article distributed under the terms of the Creative Commons Attribution License, which permits unrestricted use, distribution, and reproduction in any medium, provided the original author and source are credited.},
note = {Publisher: Public Library of Science},
keywords = {adult, African American, African Americans, article, Attitude, Caucasian, decision making, Ethics, European Continental Ancestry Group, extraction, eyelash, Facial Expression, facies, female, human, Humans, Judgment, male, perception, physiology, psychology, Racism, Social Perception, Stereotyping, visual information, wrinkle, Young Adult},
pubstate = {published},
tppubtype = {article}
}
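
The entry above turns on the distinction between low spatial frequencies (coarse information such as the face contour) and high spatial frequencies (fine-grained details such as eyelashes or wrinkles). As a rough illustration of what such a manipulation involves, the sketch below low- or high-pass filters an image in the Fourier domain; the cutoff value is an assumption for illustration only.

import numpy as np

def sf_filter(image, cutoff_cycles, kind="low"):
    """Keep only spatial frequencies below (or above) a cutoff in cycles/image."""
    h, w = image.shape
    fy = np.fft.fftfreq(h)[:, None] * h     # vertical frequency, cycles per image
    fx = np.fft.fftfreq(w)[None, :] * w     # horizontal frequency, cycles per image
    radius = np.sqrt(fx ** 2 + fy ** 2)
    keep = radius <= cutoff_cycles if kind == "low" else radius > cutoff_cycles
    return np.real(np.fft.ifft2(np.fft.fft2(image) * keep))

# e.g. coarse (low-SF) and fine-grained (high-SF) versions of a face image
# low_sf  = sf_filter(face, 8, kind="low")
# high_sf = sf_filter(face, 8, kind="high")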
Estéphan, A.; Fiset, D.; Saumure, C.; Plouffe-Demers, M. -P.; Zhang, Y.; Sun, D.; Blais, C.
Time course of cultural differences in spatial frequency use for face identification Journal article
In: Scientific Reports, vol. 8, no. 1, 2018, ISSN: 2045-2322 (Publisher: Nature Publishing Group).
@article{estephan_time_2018,
title = {Time course of cultural differences in spatial frequency use for face identification},
author = {A. Estéphan and D. Fiset and C. Saumure and M. -P. Plouffe-Demers and Y. Zhang and D. Sun and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85041296131&doi=10.1038%2fs41598-018-19971-1&partnerID=40&md5=b9e70f2d9ac7c641171bd6e450316846},
doi = {10.1038/s41598-018-19971-1},
issn = {20452322},
year = {2018},
date = {2018-01-01},
journal = {Scientific Reports},
volume = {8},
number = {1},
abstract = {Several previous studies of eye movements have put forward that, during face recognition, Easterners spread their attention across a greater part of their visual field than Westerners. Recently, we found that culture's effect on the perception of faces reaches mechanisms deeper than eye movements, therefore affecting the very nature of information sampled by the visual system: That is, Westerners globally rely more than Easterners on fine-grained visual information (i.e. high spatial frequencies; SFs), whereas Easterners rely more on coarse-grained visual information (i.e. low SFs). These findings suggest that culture influences basic visual processes; however, the temporal onset and dynamics of these culture-specific perceptual differences are still unknown. Here, we investigate the time course of SF use in Western Caucasian (Canadian) and East Asian (Chinese) observers during a face identification task. Firstly, our results confirm that Easterners use relatively lower SFs than Westerners, while the latter use relatively higher SFs. More importantly, our results indicate that these differences arise as early as 34 ms after stimulus onset, and remain stable through time. Our research supports the hypothesis that Westerners and Easterners initially rely on different types of visual information during face processing. © The Author(s) 2018.},
note = {Publisher: Nature Publishing Group},
keywords = {adult, article, Asian Continental Ancestry Group, attention, Canada, Caucasian, European Continental Ancestry Group, eye movement, Eye movements, Facial Recognition, female, financial management, human, human experiment, Humans, male, Pattern Recognition, physiology, stimulus, Visual, visual information, Young Adult},
pubstate = {published},
tppubtype = {article}
}
Royer, J.; Blais, C.; Charbonneau, I.; Déry, K.; Tardif, J.; Duchaine, B.; Gosselin, F.; Fiset, D.
Greater reliance on the eye region predicts better face recognition ability Journal article
In: Cognition, vol. 181, pp. 12–20, 2018, ISSN: 0010-0277 (Publisher: Elsevier B.V.).
@article{royer_greater_2018,
title = {Greater reliance on the eye region predicts better face recognition ability},
author = {J. Royer and C. Blais and I. Charbonneau and K. Déry and J. Tardif and B. Duchaine and F. Gosselin and D. Fiset},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85051252949&doi=10.1016%2fj.cognition.2018.08.004&partnerID=40&md5=e1af5e939ec7381c82ff3d13d1c3cc51},
doi = {10.1016/j.cognition.2018.08.004},
issn = {00100277},
year = {2018},
date = {2018-01-01},
journal = {Cognition},
volume = {181},
pages = {12–20},
abstract = {Interest in using individual differences in face recognition ability to better understand the perceptual and cognitive mechanisms supporting face processing has grown substantially in recent years. The goal of this study was to determine how varying levels of face recognition ability are linked to changes in visual information extraction strategies in an identity recognition task. To address this question, fifty participants completed six tasks measuring face and object processing abilities. Using the Bubbles method (Gosselin & Schyns, 2001), we also measured each individual's use of visual information in face recognition. At the group level, our results replicate previous findings demonstrating the importance of the eye region for face identification. More importantly, we show that face processing ability is related to a systematic increase in the use of the eye area, especially the left eye from the observer's perspective. Indeed, our results suggest that the use of this region accounts for approximately 20% of the variance in face processing ability. These results support the idea that individual differences in face processing are at least partially related to the perceptual extraction strategy used during face identification. © 2018 Elsevier B.V.},
note = {Publisher: Elsevier B.V.},
keywords = {Adolescent, adult, article, clinical article, extraction, Eye, Facial Recognition, female, human, human experiment, Humans, male, recognition, Recognition (Psychology), visual information, Young Adult},
pubstate = {published},
tppubtype = {article}
}
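
The key individual-differences claim in the abstract above is that reliance on the eye area accounts for roughly 20% of the variance in face processing ability. The sketch below shows the kind of simple regression that yields such an R² figure; the variable names and data are placeholders, not the authors' actual measures.

import numpy as np

def variance_explained(ability, eye_region_use):
    """R^2 from a simple linear regression of ability on eye-region use."""
    slope, intercept = np.polyfit(eye_region_use, ability, 1)
    residual = ability - (slope * eye_region_use + intercept)
    return 1.0 - residual.var() / ability.var()

# ability = np.array([...])   # one composite face-processing score per participant
# eye_use = np.array([...])   # e.g. summed classification-image weights in the eye area
# print(variance_explained(ability, eye_use))   # the study reports roughly 0.20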
Royer, J.; Blais, C.; Barnabé-Lortie, V.; Carré, M.; Leclerc, J.; Fiset, D.
Efficient visual information for unfamiliar face matching despite viewpoint variations: It's not in the eyes! Journal article
In: Vision Research, vol. 123, pp. 33–40, 2016, ISSN: 0042-6989 (Publisher: Elsevier Ltd).
@article{royer_efficient_2016,
title = {Efficient visual information for unfamiliar face matching despite viewpoint variations: It's not in the eyes!},
author = {J. Royer and C. Blais and V. Barnabé-Lortie and M. Carré and J. Leclerc and D. Fiset},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84968779426&doi=10.1016%2fj.visres.2016.04.004&partnerID=40&md5=4c63f6eea279f7322c9af23ae9ed22c1},
doi = {10.1016/j.visres.2016.04.004},
issn = {00426989 (ISSN)},
year = {2016},
date = {2016-01-01},
journal = {Vision Research},
volume = {123},
pages = {33–40},
abstract = {Faces are encountered in highly diverse angles in real-world settings. Despite this considerable diversity, most individuals are able to easily recognize familiar faces. The vast majority of studies in the field of face recognition have nonetheless focused almost exclusively on frontal views of faces. Indeed, a number of authors have investigated the diagnostic facial features for the recognition of frontal views of faces previously encoded in this same view. However, the nature of the information useful for identity matching when the encoded face and test face differ in viewing angle remains mostly unexplored. The present study addresses this issue using individual differences and bubbles, a method that pinpoints the facial features effectively used in a visual categorization task. Our results indicate that the use of features located in the center of the face, the lower left portion of the nose area and the center of the mouth, are significantly associated with individual efficiency to generalize a face's identity across different viewpoints. However, as faces become more familiar, the reliance on this area decreases, while the diagnosticity of the eye region increases. This suggests that a certain distinction can be made between the visual mechanisms subtending viewpoint invariance and face recognition in the case of unfamiliar face identification. Our results further support the idea that the eye area may only come into play when the face stimulus is particularly familiar to the observer. © 2016 Elsevier Ltd.},
note = {Publisher: Elsevier Ltd},
keywords = {accuracy, adult, article, association, attention, Bubbles, Evoked Potentials, eye fixation, Face, face profile, face recognition, Facial Recognition, facies, female, Fixation, human, human experiment, Humans, Image analysis, Individual differences, male, Ocular, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, Psychophysics, recognition, Recognition (Psychology), regression analysis, task performance, unfamiliar face matching, viewpoint variation, Viewpoint variations, Visual, visual discrimination, visual evoked potential, visual information, visual memory, visual stimulation, visual system parameters, Young Adult},
pubstate = {published},
tppubtype = {article}
}
Roy, C.; Blais, C.; Fiset, D.; Rainville, P.; Gosselin, F.
Efficient information for recognizing pain in facial expressions Journal article
In: European Journal of Pain (United Kingdom), vol. 19, no. 6, pp. 852–860, 2015, ISSN: 1090-3801.
@article{roy_efficient_2015,
title = {Efficient information for recognizing pain in facial expressions},
author = {C. Roy and C. Blais and D. Fiset and P. Rainville and F. Gosselin},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84929122739&doi=10.1002%2fejp.676&partnerID=40&md5=027f6da7b6d5c98c86de6a07766fb83d},
doi = {10.1002/ejp.676},
issn = {10903801 (ISSN)},
year = {2015},
date = {2015-01-01},
journal = {European Journal of Pain (United Kingdom)},
volume = {19},
number = {6},
pages = {852–860},
abstract = {Background The face as a visual stimulus is a reliable source of information for judging the pain experienced by others. Until now, most studies investigating the facial expression of pain have used a descriptive method (i.e. Facial Action Coding System). However, the facial features that are relevant for the observer in the identification of the expression of pain remain largely unknown despite the strong medical impact that misjudging pain can have on patients' well-being. Methods Here, we investigated this question by applying the Bubbles method. Fifty healthy volunteers were asked to categorize facial expressions (the six basic emotions, pain and neutrality) displayed in stimuli obtained from a previously validated set and presented for 500 ms each. To determine the critical areas of the face used in this categorization task, the faces were partly masked based on random sampling of regions of the stimuli at different spatial frequency ranges. Results Results show that accurate pain discrimination relies mostly on the frown lines and the mouth. Finally, an ideal observer analysis indicated that the use of the frown lines in human observers could not be attributed to the objective 'informativeness' of this area. Conclusions Based on a recent study suggesting that this area codes for the affective dimension of pain, we propose that the visual system has evolved to focus primarily on the facial cues that signal the aversiveness of pain, consistent with the social role of facial expressions in the communication of potential threats. © 2015 European Pain Federation-EFIC®.},
keywords = {anger, article, association, Classification, Cues, disgust, emotion, Emotions, Facial Expression, Fear, female, happiness, human, human experiment, Humans, male, nociception, normal human, Pain, pain assessment, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, random sample, reproducibility, Reproducibility of Results, sadness, statistical significance, Visual, visual information, visual stimulation},
pubstate = {published},
tppubtype = {article}
}
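
The abstract above mentions an ideal observer analysis used to check whether human reliance on the frown lines reflects the objective informativeness of that region. A common way to build such an observer for bubbled stimuli is template matching restricted to the revealed pixels, as sketched below; this is an assumed implementation, not the authors' code.

import numpy as np

def ideal_observer(stimulus, mask, prototypes):
    """Pick the label whose unmasked prototype best matches the revealed pixels.

    prototypes: dict mapping expression label -> unmasked image array.
    """
    weights = mask.ravel()
    best_label, best_score = None, -np.inf
    for label, proto in prototypes.items():
        a = stimulus.ravel() - stimulus.mean()
        b = proto.ravel() - proto.mean()
        # correlation weighted by how much each pixel was revealed
        score = np.sum(weights * a * b) / (
            np.sqrt(np.sum(weights * a * a) * np.sum(weights * b * b)) + 1e-12
        )
        if score > best_score:
            best_label, best_score = label, score
    return best_label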
Blais, C.; Roy, C.; Fiset, D.; Arguin, M.; Gosselin, F.
The eyes are not the window to basic emotions Journal article
In: Neuropsychologia, vol. 50, no. 12, pp. 2830–2838, 2012, ISSN: 0028-3932.
@article{blais_eyes_2012,
title = {The eyes are not the window to basic emotions},
author = {C. Blais and C. Roy and D. Fiset and M. Arguin and F. Gosselin},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84865829171&doi=10.1016%2fj.neuropsychologia.2012.08.010&partnerID=40&md5=8a46d347f96ea9bd94bd161b6f1e8b92},
doi = {10.1016/j.neuropsychologia.2012.08.010},
issn = {00283932},
year = {2012},
date = {2012-01-01},
journal = {Neuropsychologia},
volume = {50},
number = {12},
pages = {2830–2838},
abstract = {Facial expressions are one of the most important ways to communicate our emotional state. In popular culture and in the scientific literature on face processing, the eye area is often conceived as a very important - if not the most important - cue for the recognition of facial expressions. In support of this, an underutilization of the eye area is often observed in clinical populations with a deficit in the recognition of facial expressions of emotions. Here, we used the Bubbles technique to verify which facial cue is the most important when it comes to discriminating between eight static and dynamic facial expressions (i.e., six basic emotions, pain and a neutral expression). We found that the mouth area is the most important cue for both static and dynamic facial expressions. We conducted an ideal observer analysis on the static expressions and determined that the mouth area is the most informative. However, we found an underutilization of the eye area by human participants in comparison to the ideal observer. We then demonstrated that the mouth area contains the most discriminative motions across expressions. We propose that the greater utilization of the mouth area by the human participants might come from remnants of the strategy the brain has developed with dynamic stimuli, and/or from a strategy whereby the most informative area is prioritized due to the limited capacity of the visuo-cognitive system. © 2012 Elsevier Ltd.},
keywords = {adult, analytic method, article, association, association cortex, cognition, Cues, Discrimination (Psychology), discriminative stimulus, dynamic stimulus, emotion, Emotions, Eye, Facial Expression, female, Fixation, human, human experiment, Humans, male, Mouth, normal human, Ocular, Pattern Recognition, Photic Stimulation, static stimulus, task performance, Visual, visual discrimination, visual information, visual memory, visual system function, Young Adult},
pubstate = {published},
tppubtype = {article}
}
Allili, M. S.; Ziou, D.
Object tracking in videos using adaptive mixture models and active contours Journal article
In: Neurocomputing, vol. 71, no. 10-12, pp. 2001–2011, 2008, ISSN: 0925-2312.
@article{allili_object_2008,
title = {Object tracking in videos using adaptive mixture models and active contours},
author = {M. S. Allili and D. Ziou},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-44649197137&doi=10.1016%2fj.neucom.2007.10.019&partnerID=40&md5=a2aef677fae1b220f68c9fd720be3fd5},
doi = {10.1016/j.neucom.2007.10.019},
issn = {09252312},
year = {2008},
date = {2008-01-01},
journal = {Neurocomputing},
volume = {71},
number = {10-12},
pages = {2001–2011},
abstract = {In this paper, we propose a novel object tracking algorithm for video sequences, based on active contours. The tracking is based on matching the object appearance model between successive frames of the sequence using active contours. We formulate the tracking as a minimization of an objective function incorporating region, boundary and shape information. Further, in order to handle variation in object appearance due to self-shadowing, changing illumination conditions and camera geometry, we propose an adaptive mixture model for the object representation. The implementation of the method is based on the level set method. We validate our approach on tracking examples using real video sequences, with comparison to two recent state-of-the-art methods. © 2008 Elsevier B.V. All rights reserved.},
keywords = {Active contours, algorithm, Algorithms, article, controlled study, Image analysis, Image processing, imaging system, Level set method, Mathematical models, motion analysis system, Object recognition, priority journal, Set theory, statistical model, Video cameras, Video sequences, videorecording, visual information},
pubstate = {published},
tppubtype = {article}
}
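
The tracking approach summarized above couples region, boundary and shape terms with an adaptive mixture model of the object's appearance, evolved with a level-set contour. The sketch below illustrates only the adaptive-appearance part under simple assumptions: a Gaussian mixture is re-fitted to the pixels inside the current object mask on each frame, and its per-pixel log-likelihood can then serve as a region term.

import numpy as np
from sklearn.mixture import GaussianMixture

def update_appearance_model(frame, object_mask, n_components=3):
    """Re-fit a Gaussian mixture to the pixels inside the current object mask."""
    pixels = frame[object_mask]              # (n_pixels, n_channels)
    gmm = GaussianMixture(n_components=n_components, covariance_type="full")
    gmm.fit(pixels)
    return gmm

def region_log_likelihood(frame, gmm):
    """Per-pixel log-likelihood under the object model (a region term)."""
    h, w, c = frame.shape
    return gmm.score_samples(frame.reshape(-1, c)).reshape(h, w)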
Renaud, P.; Décarie, J.; Gourd, S. -P.; Paquin, L. -C.; Bouchard, S.
Eye-Tracking in Immersive Environments: A General Methodology to Analyze Affordance-Based Interactions from Oculomotor Dynamics Journal article
In: Cyberpsychology and Behavior, vol. 6, no. 5, pp. 519–526, 2003, ISSN: 1094-9313.
@article{renaud_eye-tracking_2003,
title = {Eye-Tracking in Immersive Environments: A General Methodology to Analyze Affordance-Based Interactions from Oculomotor Dynamics},
author = {P. Renaud and J. Décarie and S. -P. Gourd and L. -C. Paquin and S. Bouchard},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-0142126405&doi=10.1089%2f109493103769710541&partnerID=40&md5=ee95606b1ed832fcc154d27b22f8bd3a},
doi = {10.1089/109493103769710541},
issn = {10949313 (ISSN)},
year = {2003},
date = {2003-01-01},
journal = {Cyberpsychology and Behavior},
volume = {6},
number = {5},
pages = {519–526},
abstract = {This paper aims at presenting a new methodology to study how perceptual and motor processes organized themselves in order to achieve invariant visual information picking-up in virtual immersions. From a head-mounted display, head and eye movements were recorded using tracking devices (magnetic and infrared) that render the six degrees-of-freedom associated with the position and orientation of head movements, and two degrees-of-freedom from one eye. We measured the continuous line of sight's deviation from a pre-selected area on a virtual stimulus. Some preliminary analyses of the dynamical properties of the emergent perceptual and motor patterns are presented as they are considered to be representative of the process of affordance extraction.},
keywords = {Adaptation, article, Computer Simulation, Data Display, device, eye movement control, Eye movements, eye tracking, Fixation, Head, head movement, head position, human, human experiment, Humans, male, methodology, Models, motor performance, Movement, normal human, Ocular, perception, Physiological, Psychological, Reference Values, User-Computer Interface, virtual reality, visual information, Visual Perception, visual stimulation},
pubstate = {published},
tppubtype = {article}
}
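
The methodology above reduces to a single continuous measure: the angular deviation of the line of sight from a pre-selected area of the virtual stimulus, computed from six degrees of freedom of head tracking plus two degrees of freedom for the eye. The sketch below shows one way such a deviation could be computed; the angle conventions and axis order are assumptions, not the authors' specification.

import numpy as np

def direction_from_angles(azimuth, elevation):
    """Unit gaze vector from azimuth/elevation in radians (x right, y up, z forward)."""
    return np.array([
        np.cos(elevation) * np.sin(azimuth),
        np.sin(elevation),
        np.cos(elevation) * np.cos(azimuth),
    ])

def gaze_deviation_deg(head_rotation, eye_azimuth, eye_elevation,
                       head_position, target_position):
    """Angle in degrees between the line of sight and the head-to-target direction."""
    gaze_world = head_rotation @ direction_from_angles(eye_azimuth, eye_elevation)
    to_target = target_position - head_position
    to_target = to_target / np.linalg.norm(to_target)
    cosine = np.clip(np.dot(gaze_world, to_target), -1.0, 1.0)
    return np.degrees(np.arccos(cosine))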