

Blais, C.; Fiset, D.; Roy, C.; Régimbald, C. S.; Gosselin, F.
Eye fixation patterns for categorizing static and dynamic facial expressions Journal Article
In: Emotion, vol. 17, no. 7, pp. 1107–1119, 2017, ISSN: 1528-3542, (Publisher: American Psychological Association Inc.).
@article{blais_eye_2017,
title = {Eye fixation patterns for categorizing static and dynamic facial expressions},
author = {C. Blais and D. Fiset and C. Roy and C. S. Régimbald and F. Gosselin},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85016754655&doi=10.1037%2femo0000283&partnerID=40&md5=baecd8d6500d1447d48a399497611ccd},
doi = {10.1037/emo0000283},
issn = {1528-3542},
year = {2017},
date = {2017-01-01},
journal = {Emotion},
volume = {17},
number = {7},
pages = {1107–1119},
abstract = {Facial expressions of emotion are dynamic in nature, but most studies on the visual strategies underlying the recognition of facial emotions have used static stimuli. The present study directly compared the visual strategies underlying the recognition of static and dynamic facial expressions using eye tracking and the Bubbles technique. The results revealed different eye fixation patterns with the 2 kinds of stimuli, with fewer fixations on the eye and mouth area during the recognition of dynamic than static expressions. However, these differences in eye fixations were not accompanied by any systematic differences in the facial information that was actually processed to recognize the expressions. © 2017 American Psychological Association.},
note = {Publisher: American Psychological Association Inc.},
keywords = {adult, anatomy and histology, Bubbles, Dynamic, emotion, Emotions, Eye, eye fixation, Eye movements, eye tracking, Facial Expression, facial expressions, female, Fixation, human, Humans, male, Mouth, Ocular, Photic Stimulation, photostimulation, physiology, PsycINFO, stimulus, Visual strategies, Young Adult},
pubstate = {published},
tppubtype = {article}
}
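A minimal sketch of the kind of fixation analysis summarized in the entry above: comparing the proportion of fixations that land in eye- and mouth-region ROIs for static versus dynamic trials. This is not the authors' code; the image size, ROI coordinates, and fixation data are illustrative assumptions.

```python
# Illustrative sketch (not the authors' pipeline): proportion of fixations
# inside hypothetical eye/mouth ROIs, compared between static and dynamic trials.
import numpy as np

IMG_W, IMG_H = 256, 256                      # hypothetical stimulus size (pixels)
ROIS = {                                     # hypothetical bounding boxes (x0, y0, x1, y1)
    "eyes": (60, 70, 196, 120),
    "mouth": (90, 170, 166, 210),
}

def roi_proportions(fixations, rois):
    """Proportion of (x, y) fixations falling inside each ROI."""
    fixations = np.asarray(fixations, dtype=float)
    out = {}
    for name, (x0, y0, x1, y1) in rois.items():
        inside = ((fixations[:, 0] >= x0) & (fixations[:, 0] <= x1) &
                  (fixations[:, 1] >= y0) & (fixations[:, 1] <= y1))
        out[name] = inside.mean()
    return out

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    # Synthetic fixations: static trials drawn closer to the eye region,
    # dynamic trials spread more broadly over the face.
    static = rng.normal(loc=(128, 100), scale=(35, 30), size=(500, 2))
    dynamic = rng.normal(loc=(128, 130), scale=(50, 45), size=(500, 2))
    for label, fix in [("static", static), ("dynamic", dynamic)]:
        print(label, roi_proportions(fix, ROIS))
```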
Royer, J.; Blais, C.; Barnabé-Lortie, V.; Carré, M.; Leclerc, J.; Fiset, D.
Efficient visual information for unfamiliar face matching despite viewpoint variations: It's not in the eyes! Journal Article
In: Vision Research, vol. 123, pp. 33–40, 2016, ISSN: 0042-6989, (Publisher: Elsevier Ltd).
@article{royer_efficient_2016,
title = {Efficient visual information for unfamiliar face matching despite viewpoint variations: It's not in the eyes!},
author = {J. Royer and C. Blais and V. Barnabé-Lortie and M. Carré and J. Leclerc and D. Fiset},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84968779426&doi=10.1016%2fj.visres.2016.04.004&partnerID=40&md5=4c63f6eea279f7322c9af23ae9ed22c1},
doi = {10.1016/j.visres.2016.04.004},
issn = {0042-6989},
year = {2016},
date = {2016-01-01},
journal = {Vision Research},
volume = {123},
pages = {33–40},
abstract = {Faces are encountered in highly diverse angles in real-world settings. Despite this considerable diversity, most individuals are able to easily recognize familiar faces. The vast majority of studies in the field of face recognition have nonetheless focused almost exclusively on frontal views of faces. Indeed, a number of authors have investigated the diagnostic facial features for the recognition of frontal views of faces previously encoded in this same view. However, the nature of the information useful for identity matching when the encoded face and test face differ in viewing angle remains mostly unexplored. The present study addresses this issue using individual differences and bubbles, a method that pinpoints the facial features effectively used in a visual categorization task. Our results indicate that the use of features located in the center of the face, the lower left portion of the nose area and the center of the mouth, are significantly associated with individual efficiency to generalize a face's identity across different viewpoints. However, as faces become more familiar, the reliance on this area decreases, while the diagnosticity of the eye region increases. This suggests that a certain distinction can be made between the visual mechanisms subtending viewpoint invariance and face recognition in the case of unfamiliar face identification. Our results further support the idea that the eye area may only come into play when the face stimulus is particularly familiar to the observer. © 2016 Elsevier Ltd.},
note = {Publisher: Elsevier Ltd},
keywords = {accuracy, adult, article, association, attention, Bubbles, Evoked Potentials, eye fixation, Face, face profile, face recognition, Facial Recognition, facies, female, Fixation, human, human experiment, Humans, Image analysis, Individual differences, male, Ocular, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, Psychophysics, recognition, Recognition (Psychology), regression analysis, task performance, unfamiliar face matching, viewpoint variation, Viewpoint variations, Visual, visual discrimination, visual evoked potential, visual information, visual memory, visual stimulation, visual system parameters, Young Adult},
pubstate = {published},
tppubtype = {article}
}
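A minimal sketch of the individual-differences logic described in the entry above: correlating each observer's matching efficiency with how strongly their Bubbles classification image weights a given facial region. This is not the article's analysis code; the ROI, the synthetic classification images, and the efficiency scores are assumptions.

```python
# Illustrative sketch (synthetic data): relate per-observer efficiency to
# reliance on a hypothetical central-face region of the classification image.
import numpy as np

rng = np.random.default_rng(1)
n_obs, img_h, img_w = 30, 128, 128

# Synthetic per-observer classification images and efficiency scores.
ci = rng.normal(size=(n_obs, img_h, img_w))
efficiency = rng.normal(size=n_obs)

# Hypothetical ROI covering the centre of the face (nose/mouth area).
roi = np.zeros((img_h, img_w), dtype=bool)
roi[60:110, 40:88] = True

roi_use = ci[:, roi].mean(axis=1)            # mean CI value in the ROI, per observer
r = np.corrcoef(roi_use, efficiency)[0, 1]   # correlation across observers
slope, intercept = np.polyfit(roi_use, efficiency, 1)
print(f"r = {r:.2f}, slope = {slope:.2f}, intercept = {intercept:.2f}")
```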
Blais, C.; Roy, C.; Fiset, D.; Arguin, M.; Gosselin, F.
The eyes are not the window to basic emotions Journal Article
In: Neuropsychologia, vol. 50, no. 12, pp. 2830–2838, 2012, ISSN: 0028-3932.
@article{blais_eyes_2012,
title = {The eyes are not the window to basic emotions},
author = {C. Blais and C. Roy and D. Fiset and M. Arguin and F. Gosselin},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84865829171&doi=10.1016%2fj.neuropsychologia.2012.08.010&partnerID=40&md5=8a46d347f96ea9bd94bd161b6f1e8b92},
doi = {10.1016/j.neuropsychologia.2012.08.010},
issn = {0028-3932},
year = {2012},
date = {2012-01-01},
journal = {Neuropsychologia},
volume = {50},
number = {12},
pages = {2830–2838},
abstract = {Facial expressions are one of the most important ways to communicate our emotional state. In popular culture and in the scientific literature on face processing, the eye area is often conceived as a very important - if not the most important - cue for the recognition of facial expressions. In support of this, an underutilization of the eye area is often observed in clinical populations with a deficit in the recognition of facial expressions of emotions. Here, we used the Bubbles technique to verify which facial cue is the most important when it comes to discriminating between eight static and dynamic facial expressions (i.e., six basic emotions, pain and a neutral expression). We found that the mouth area is the most important cue for both static and dynamic facial expressions. We conducted an ideal observer analysis on the static expressions and determined that the mouth area is the most informative. However, we found an underutilization of the eye area by human participants in comparison to the ideal observer. We then demonstrated that the mouth area contains the most discriminative motions across expressions. We propose that the greater utilization of the mouth area by the human participants might come from remnants of the strategy the brain has developed with dynamic stimuli, and/or from a strategy whereby the most informative area is prioritized due to the limited capacity of the visuo-cognitive system. © 2012 Elsevier Ltd.},
keywords = {adult, analytic method, article, association, association cortex, cognition, Cues, Discrimination (Psychology), discriminative stimulus, dynamic stimulus, emotion, Emotions, Eye, Facial Expression, female, Fixation, human, human experiment, Humans, male, Mouth, normal human, Ocular, Pattern Recognition, Photic Stimulation, static stimulus, task performance, Visual, visual discrimination, visual information, visual memory, visual system function, Young Adult},
pubstate = {published},
tppubtype = {article}
}
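The Bubbles technique used in the entry above can be sketched as a classification-image computation: contrast the Gaussian-aperture masks of correct and incorrect trials to see which facial regions support accurate categorization. The sketch below is illustrative only, not the authors' implementation; the stimulus size, bubble width, trial counts, and toy response model are assumptions.

```python
# Illustrative sketch of a Bubbles classification image with synthetic trials.
import numpy as np

def bubble_mask(centers, sigma, shape):
    """Sum of Gaussian apertures ('bubbles') at the given (row, col) centers."""
    h, w = shape
    yy, xx = np.meshgrid(np.arange(h), np.arange(w), indexing="ij")
    mask = np.zeros(shape)
    for cy, cx in centers:
        mask += np.exp(-((yy - cy) ** 2 + (xx - cx) ** 2) / (2 * sigma ** 2))
    return np.clip(mask, 0, 1)

def classification_image(masks, correct):
    """Mean mask on correct trials minus mean mask on incorrect trials."""
    masks = np.asarray(masks)
    correct = np.asarray(correct, dtype=bool)
    return masks[correct].mean(axis=0) - masks[~correct].mean(axis=0)

if __name__ == "__main__":
    rng = np.random.default_rng(2)
    shape, sigma, n_trials, n_bubbles = (128, 128), 10, 200, 5
    masks, correct = [], []
    for _ in range(n_trials):
        centers = rng.integers(0, 128, size=(n_bubbles, 2))
        m = bubble_mask(centers, sigma, shape)
        masks.append(m)
        # Toy response model: trials exposing the "mouth" area (rows 85-110)
        # are more likely to be categorized correctly.
        p_correct = 0.4 + 0.5 * m[85:110, 40:88].mean()
        correct.append(rng.random() < p_correct)
    ci = classification_image(masks, correct)
    print("classification image peaks at", np.unravel_index(ci.argmax(), ci.shape))
```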
Blais, C.; Jack, R. E.; Scheepers, C.; Fiset, D.; Caldara, R.
Culture shapes how we look at faces Journal Article
In: PLoS ONE, vol. 3, no. 8, 2008, ISSN: 1932-6203.
@article{blais_culture_2008,
title = {Culture shapes how we look at faces},
author = {C. Blais and R. E. Jack and C. Scheepers and D. Fiset and R. Caldara},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-51549087752&doi=10.1371%2fjournal.pone.0003022&partnerID=40&md5=e75dcf9792dbd03fd1ef5894b81bfc4f},
doi = {10.1371/journal.pone.0003022},
issn = {1932-6203},
year = {2008},
date = {2008-01-01},
journal = {PLoS ONE},
volume = {3},
number = {8},
abstract = {Background: Face processing, amongst many basic visual skills, is thought to be invariant across all humans. From as early as 1965, studies of eye movements have consistently revealed a systematic triangular sequence of fixations over the eyes and the mouth, suggesting that faces elicit a universal, biologically-determined information extraction pattern. Methodology/Principal Findings: Here we monitored the eye movements of Western Caucasian and East Asian observers while they learned, recognized, and categorized by race Western Caucasian and East Asian faces. Western Caucasian observers reproduced a scattered triangular pattern of fixations for faces of both races and across tasks. Contrary to intuition, East Asian observers focused more on the central region of the face. Conclusions/Significance: These results demonstrate that face processing can no longer be considered as arising from a universal series of perceptual events. The strategy employed to extract visual information from faces differs across cultures. © 2008 Blais et al.},
keywords = {adult, article, Asian, Asian Continental Ancestry Group, Caucasian, Classification, Cross-Cultural Comparison, cultural anthropology, cultural factor, Culture, East Asian, European Continental Ancestry Group, Eye, eye fixation, eye movement, Eye movements, Face, face asymmetry, face recognition, female, Fixation, histology, human, human experiment, Humans, Learning, male, methodology, Mouth, normal human, Nose, observer variation, Ocular, physiology, race difference, recognition, Recognition (Psychology), vision, visual memory, Visual Perception},
pubstate = {published},
tppubtype = {article}
}
Albert, G.; Renaud, P.; Chartier, S.; Renaud, L.; Sauvé, L.; Bouchard, S.
Scene perception, gaze behavior, and perceptual learning in virtual environments Journal Article
In: Cyberpsychology and Behavior, vol. 8, no. 6, pp. 592–600, 2005, ISSN: 1094-9313.
@article{albert_scene_2005,
title = {Scene perception, gaze behavior, and perceptual learning in virtual environments},
author = {G. Albert and P. Renaud and S. Chartier and L. Renaud and L. Sauvé and S. Bouchard},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-29444459017&doi=10.1089%2fcpb.2005.8.592&partnerID=40&md5=87150bf324f101229007a48f91517702},
doi = {10.1089/cpb.2005.8.592},
issn = {1094-9313},
year = {2005},
date = {2005-01-01},
journal = {Cyberpsychology and Behavior},
volume = {8},
number = {6},
pages = {592–600},
abstract = {More and more immersive environments are developed to provide support for learning or training purposes. Ecological validity of such environments is usually based on learning performance comparisons between virtual environments and their genuine counterparts. Little is known about learning processes occurring in immersive environments. A new technique is proposed for testing perceptual learning during virtual immersion. This methodology relies upon eye-tracking technologies to analyze gaze behavior recorded in relation to virtual objects' features and tasks' requirements. It is proposed that perceptual learning mechanisms engaged could be detected through eye movements. In this study, nine subjects performed perceptual learning tasks in virtual immersion. Results obtained indicated that perceptual learning influences gaze behavior dynamics. More precisely, analysis revealed that fixation number and variability in fixation duration varied with perceptual learning level. Such findings could contribute in shedding light on learning mechanisms as well as providing additional support for validating virtual learning environments. © Mary Ann Liebert, Inc.},
keywords = {adult, article, behavior, controlled study, eye fixation, eye movement, female, Fixation, gaze, human, human experiment, Humans, immersion, Learning, male, mental performance, normal human, Ocular, perceptual learning, qualitative validity, User-Computer Interface, validation process, virtual reality, Visual Perception},
pubstate = {published},
tppubtype = {article}
}
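A minimal sketch of the gaze-behaviour measures reported in the entry above, fixation count and variability in fixation duration per learning block. The fixation durations and the "early"/"late" pattern below are synthetic assumptions, not data from the study.

```python
# Illustrative sketch (synthetic data): per-block fixation count and
# fixation-duration variability, the two measures linked to perceptual learning.
import numpy as np

def gaze_summary(fixation_durations_ms):
    """Return (fixation count, SD of fixation duration in ms) for one block."""
    d = np.asarray(fixation_durations_ms, dtype=float)
    return d.size, d.std(ddof=1)

if __name__ == "__main__":
    rng = np.random.default_rng(3)
    # Hypothetical pattern: later blocks show fewer, more regular fixations.
    blocks = {
        "early": rng.gamma(shape=2.0, scale=120.0, size=180),
        "late": rng.gamma(shape=6.0, scale=60.0, size=120),
    }
    for name, durations in blocks.items():
        n, sd = gaze_summary(durations)
        print(f"{name}: {n} fixations, duration SD = {sd:.1f} ms")
```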
Renaud, P.; Décarie, J.; Gourd, S. -P.; Paquin, L. -C.; Bouchard, S.
Eye-Tracking in Immersive Environments: A General Methodology to Analyze Affordance-Based Interactions from Oculomotor Dynamics Journal Article
In: Cyberpsychology and Behavior, vol. 6, no. 5, pp. 519–526, 2003, ISSN: 1094-9313.
@article{renaud_eye-tracking_2003,
title = {Eye-Tracking in Immersive Environments: A General Methodology to Analyze Affordance-Based Interactions from Oculomotor Dynamics},
author = {P. Renaud and J. Décarie and S. -P. Gourd and L. -C. Paquin and S. Bouchard},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-0142126405&doi=10.1089%2f109493103769710541&partnerID=40&md5=ee95606b1ed832fcc154d27b22f8bd3a},
doi = {10.1089/109493103769710541},
issn = {1094-9313},
year = {2003},
date = {2003-01-01},
journal = {Cyberpsychology and Behavior},
volume = {6},
number = {5},
pages = {519–526},
abstract = {This paper aims at presenting a new methodology to study how perceptual and motor processes organized themselves in order to achieve invariant visual information picking-up in virtual immersions. From a head-mounted display, head and eye movements were recorded using tracking devices (magnetic and infrared) that render the six degrees-of-freedom associated with the position and orientation of head movements, and two degrees-of-freedom from one eye. We measured the continuous line of sight's deviation from a pre-selected area on a virtual stimulus. Some preliminary analyses of the dynamical properties of the emergent perceptual and motor patterns are presented as they are considered to be representative of the process of affordance extraction.},
keywords = {Adaptation, article, Computer Simulation, Data Display, device, eye movement control, Eye movements, eye tracking, Fixation, Head, head movement, head position, human, human experiment, Humans, male, methodology, Models, motor performance, Movement, normal human, Ocular, perception, Physiological, Psychological, Reference Values, User-Computer Interface, virtual reality, visual information, Visual Perception, visual stimulation},
pubstate = {published},
tppubtype = {article}
}
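A minimal sketch of the core measurement described in the entry above: the angular deviation of the line of sight from a pre-selected area on a virtual stimulus, given head pose and an eye-in-head direction. For brevity the head orientation is reduced to yaw only; the rotation convention and all sample values are assumptions, not the article's method.

```python
# Illustrative sketch (simplified): angular deviation between the world-space
# line of sight and the direction from the head to a pre-selected target.
import numpy as np

def yaw_pitch_to_vector(yaw, pitch):
    """Unit gaze vector from yaw/pitch in radians (x right, y up, z forward)."""
    return np.array([np.sin(yaw) * np.cos(pitch),
                     np.sin(pitch),
                     np.cos(yaw) * np.cos(pitch)])

def rotation_yaw(yaw):
    """Rotation matrix for head yaw about the vertical (y) axis."""
    c, s = np.cos(yaw), np.sin(yaw)
    return np.array([[c, 0.0, s],
                     [0.0, 1.0, 0.0],
                     [-s, 0.0, c]])

def gaze_deviation_deg(head_pos, head_yaw, eye_yaw, eye_pitch, target_pos):
    """Angle (degrees) between the line of sight and the head-to-target direction."""
    gaze_world = rotation_yaw(head_yaw) @ yaw_pitch_to_vector(eye_yaw, eye_pitch)
    to_target = np.asarray(target_pos, dtype=float) - np.asarray(head_pos, dtype=float)
    to_target /= np.linalg.norm(to_target)
    cosang = np.clip(np.dot(gaze_world, to_target), -1.0, 1.0)
    return np.degrees(np.arccos(cosang))

if __name__ == "__main__":
    # One sample: head at the origin turned 10 deg left, eye 5 deg right and
    # 2 deg up, target area centred 2 m ahead and slightly to the right.
    dev = gaze_deviation_deg(head_pos=[0, 0, 0], head_yaw=np.radians(-10),
                             eye_yaw=np.radians(5), eye_pitch=np.radians(2),
                             target_pos=[0.2, 0.0, 2.0])
    print(f"line-of-sight deviation: {dev:.1f} degrees")
```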