

de Recherche et d’Innovation
en Cybersécurité et Société
Marschall-Lévesque, S.; Rouleau, J. -L.; Renaud, P.
In: Archives of Sexual Behavior, vol. 47, no. 2, pp. 417–428, 2018, ISSN: 00040002, (Publisher: Springer New York LLC).
Abstract | Links | BibTeX | Tags: Acoustic Stimulation, Adolescent, adult, auditory stimulation, Child, clinical article, controlled study, crime victim, Crime Victims, Diagnosis, DSM-5, Feedback, female, human, Humans, male, Pedophilia, Penis, Photic Stimulation, photostimulation, physiology, Plethysmography, procedures, psychology, receiver operating characteristic, Sensory, sensory feedback, sex determination, Sex Offenses, sexual crime, stimulus, victim, Young Adult
@article{marschall-levesque_increasing_2018,
  title     = {Increasing Valid Profiles in Phallometric Assessment of Sex Offenders with Child Victims: Combining the Strengths of Audio Stimuli and Synthetic Characters},
  author    = {Marschall-Lévesque, S. and Rouleau, J.-L. and Renaud, P.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85033397289&doi=10.1007%2fs10508-017-1053-y&partnerID=40&md5=bbb82341dfebd50938aa29358f8e0e69},
  doi       = {10.1007/s10508-017-1053-y},
  issn      = {0004-0002},
  year      = {2018},
  date      = {2018-01-01},
  journal   = {Archives of Sexual Behavior},
  volume    = {47},
  number    = {2},
  pages     = {417--428},
  abstract  = {Penile plethysmography (PPG) is a measure of sexual interests that relies heavily on the stimuli it uses to generate valid results. Ethical considerations surrounding the use of real images in PPG have further limited the content admissible for these stimuli. To palliate this limitation, the current study aimed to combine audio and visual stimuli by incorporating computer-generated characters to create new stimuli capable of accurately classifying sex offenders with child victims, while also increasing the number of valid profiles. Three modalities (audio, visual, and audiovisual) were compared using two groups (15 sex offenders with child victims and 15 non-offenders). Both the new visual and audiovisual stimuli resulted in a 13% increase in the number of valid profiles at 2.5 mm, when compared to the standard audio stimuli. Furthermore, the new audiovisual stimuli generated a 34% increase in penile responses. All three modalities were able to discriminate between the two groups by their responses to the adult and child stimuli. Lastly, sexual interest indices for all three modalities could accurately classify participants in their appropriate groups, as demonstrated by ROC curve analysis (i.e., audio AUC = .81, 95% CI [.60, 1.00]; visual AUC = .84, 95% CI [.66, 1.00], and audiovisual AUC = .83, 95% CI [.63, 1.00]). Results suggest that computer-generated characters allow accurate discrimination of sex offenders with child victims and can be added to already validated stimuli to increase the number of valid profiles. The implications of audiovisual stimuli using computer-generated characters and their possible use in PPG evaluations are also discussed. © 2017, Springer Science+Business Media, LLC.},
  note      = {Publisher: Springer New York LLC},
  keywords  = {Acoustic Stimulation, Adolescent, adult, auditory stimulation, Child, clinical article, controlled study, crime victim, Crime Victims, Diagnosis, DSM-5, Feedback, female, human, Humans, male, Pedophilia, Penis, Photic Stimulation, photostimulation, physiology, Plethysmography, procedures, psychology, receiver operating characteristic, Sensory, sensory feedback, sex determination, Sex Offenses, sexual crime, stimulus, victim, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Léveillé, E.; Guay, S.; Blais, C.; Scherzer, P.; Beaumont, L. De
Sex-Related Differences in Emotion Recognition in Multi-concussed Athletes Journal Article
In: Journal of the International Neuropsychological Society, vol. 23, no. 1, pp. 65–77, 2017, ISSN: 13556177, (Publisher: Cambridge University Press).
Abstract | Links | BibTeX | Tags: adult, analysis of variance, Athletic Injuries, complication, Facial Expression, female, human, Humans, male, mood disorder, Mood Disorders, neuropsychological test, Neuropsychological Tests, Photic Stimulation, photostimulation, physiology, Post-Concussion Syndrome, postconcussion syndrome, recognition, Recognition (Psychology), Sex Characteristics, sexual characteristics, sport injury, Young Adult
@article{leveille_sex-related_2017,
  title     = {Sex-Related Differences in Emotion Recognition in Multi-concussed Athletes},
  author    = {Léveillé, E. and Guay, S. and Blais, C. and Scherzer, P. and De Beaumont, L.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85006263332&doi=10.1017%2fS1355617716001004&partnerID=40&md5=6bc93610c2ed3afe6f83ee371bb7caa4},
  doi       = {10.1017/S1355617716001004},
  issn      = {1355-6177},
  year      = {2017},
  date      = {2017-01-01},
  journal   = {Journal of the International Neuropsychological Society},
  volume    = {23},
  number    = {1},
  pages     = {65--77},
  abstract  = {Objectives: Concussion is defined as a complex pathophysiological process affecting the brain. Although the cumulative and long-term effects of multiple concussions are now well documented on cognitive and motor function, little is known about their effects on emotion recognition. Recent studies have suggested that concussion can result in emotional sequelae, particularly in females and multi-concussed athletes. The objective of this study was to investigate sex-related differences in emotion recognition in asymptomatic male and female multi-concussed athletes. Methods: We tested 28 control athletes (15 males) and 22 multi-concussed athletes (10 males) more than a year since the last concussion. Participants completed the Post-Concussion Symptom Scale, the Beck Depression Inventory-II, the Beck Anxiety Inventory, a neuropsychological test battery and a morphed emotion recognition task. Pictures of a male face expressing basic emotions (anger, disgust, fear, happiness, sadness, surprise) morphed with another emotion were randomly presented. After each face presentation, participants were asked to indicate the emotion expressed by the face. Results: Results revealed significant sex by group interactions in accuracy and intensity threshold for negative emotions, together with significant main effects of emotion and group. Conclusions: Male concussed athletes were significantly impaired in recognizing negative emotions and needed more emotional intensity to correctly identify these emotions, compared to same-sex controls. In contrast, female concussed athletes performed similarly to same-sex controls. These findings suggest that sex significantly modulates concussion effects on emotional facial expression recognition. © 2016 The International Neuropsychological Society.},
  note      = {Publisher: Cambridge University Press},
  keywords  = {adult, analysis of variance, Athletic Injuries, complication, Facial Expression, female, human, Humans, male, mood disorder, Mood Disorders, neuropsychological test, Neuropsychological Tests, Photic Stimulation, photostimulation, physiology, Post-Concussion Syndrome, postconcussion syndrome, recognition, Recognition (Psychology), Sex Characteristics, sexual characteristics, sport injury, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Daudelin-Peltier, C.; Forget, H.; Blais, C.; Deschênes, A.; Fiset, D.
The effect of acute social stress on the recognition of facial expression of emotions Journal Article
In: Scientific Reports, vol. 7, no. 1, 2017, ISSN: 20452322, (Publisher: Nature Publishing Group).
Abstract | Links | BibTeX | Tags: adult, Anxiety, clinical study, controlled study, disgust, evolutionary adaptation, Facial Expression, Facial Recognition, human, Humans, Hydrocortisone, male, mental stress, metabolism, monitoring, pathophysiology, Pattern Recognition, Photic Stimulation, photostimulation, physiology, Psychological, reaction time, social stress, Stress, Visual, Young Adult
@article{daudelin-peltier_effect_2017,
  title     = {The effect of acute social stress on the recognition of facial expression of emotions},
  author    = {Daudelin-Peltier, C. and Forget, H. and Blais, C. and Deschênes, A. and Fiset, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85018189261&doi=10.1038%2fs41598-017-01053-3&partnerID=40&md5=4405225de4b80852d5d98a2fd2171812},
  doi       = {10.1038/s41598-017-01053-3},
  issn      = {2045-2322},
  year      = {2017},
  date      = {2017-01-01},
  journal   = {Scientific Reports},
  volume    = {7},
  number    = {1},
  abstract  = {This study investigates the effect of acute social stress on the recognition of facial expression of emotions in healthy young men. Participants underwent both a standardized psychosocial laboratory stressor (TSST-G) and a control condition. Then, they performed a homemade version of the facial expressions megamix. All six basic emotions were included in the task. First, our results show a systematic increase in the intensity threshold for disgust following stress, meaning that the participants' performance with this emotion was impaired. We suggest that this may reflect an adaptive coping mechanism where participants attempt to decrease their anxiety and protect themselves from a socio-evaluative threat. Second, our results show a systematic decrease in the intensity threshold for surprise, therefore positively affecting the participants' performance with that emotion. We suggest that the enhanced perception of surprise following the induction of social stress may be interpreted as an evolutionary adaptation, wherein being in a stressful environment increases the benefits of monitoring signals indicating the presence of a novel or threatening event. An alternative explanation may derive from the opposite nature of the facial expressions of disgust and surprise; the decreased recognition of disgust could therefore have fostered the propensity to perceive surprise. © 2017 The Author(s).},
  note      = {Publisher: Nature Publishing Group},
  keywords  = {adult, Anxiety, clinical study, controlled study, disgust, evolutionary adaptation, Facial Expression, Facial Recognition, human, Humans, Hydrocortisone, male, mental stress, metabolism, monitoring, pathophysiology, Pattern Recognition, Photic Stimulation, photostimulation, physiology, Psychological, reaction time, social stress, Stress, Visual, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Blais, C.; Fiset, D.; Roy, C.; Régimbald, C. S.; Gosselin, F.
Eye fixation patterns for categorizing static and dynamic facial expressions Journal Article
In: Emotion, vol. 17, no. 7, pp. 1107–1119, 2017, ISSN: 15283542 (ISSN), (Publisher: American Psychological Association Inc.).
Abstract | Links | BibTeX | Tags: adult, anatomy and histology, Bubbles, Dynamic, emotion, Emotions, Eye, eye fixation, Eye movements, eye tracking, Facial Expression, facial expressions, female, Fixation, human, Humans, male, Mouth, Ocular, Photic Stimulation, photostimulation, physiology, PsycINFO, stimulus, Visual strategies, Young Adult
@article{blais_eye_2017,
  title     = {Eye fixation patterns for categorizing static and dynamic facial expressions},
  author    = {Blais, C. and Fiset, D. and Roy, C. and Régimbald, C. S. and Gosselin, F.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85016754655&doi=10.1037%2femo0000283&partnerID=40&md5=baecd8d6500d1447d48a399497611ccd},
  doi       = {10.1037/emo0000283},
  issn      = {1528-3542},
  year      = {2017},
  date      = {2017-01-01},
  journal   = {Emotion},
  volume    = {17},
  number    = {7},
  pages     = {1107--1119},
  abstract  = {Facial expressions of emotion are dynamic in nature, but most studies on the visual strategies underlying the recognition of facial emotions have used static stimuli. The present study directly compared the visual strategies underlying the recognition of static and dynamic facial expressions using eye tracking and the Bubbles technique. The results revealed different eye fixation patterns with the 2 kinds of stimuli, with fewer fixations on the eye and mouth area during the recognition of dynamic than static expressions. However, these differences in eye fixations were not accompanied by any systematic differences in the facial information that was actually processed to recognize the expressions. © 2017 American Psychological Association.},
  note      = {Publisher: American Psychological Association Inc.},
  keywords  = {adult, anatomy and histology, Bubbles, Dynamic, emotion, Emotions, Eye, eye fixation, Eye movements, eye tracking, Facial Expression, facial expressions, female, Fixation, human, Humans, male, Mouth, Ocular, Photic Stimulation, photostimulation, physiology, PsycINFO, stimulus, Visual strategies, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Royer, J.; Blais, C.; Barnabé-Lortie, V.; Carré, M.; Leclerc, J.; Fiset, D.
Efficient visual information for unfamiliar face matching despite viewpoint variations: It's not in the eyes! Journal Article
In: Vision Research, vol. 123, pp. 33–40, 2016, ISSN: 00426989 (ISSN), (Publisher: Elsevier Ltd).
Abstract | Links | BibTeX | Tags: accuracy, adult, article, association, attention, Bubbles, Evoked Potentials, eye fixation, Face, face profile, face recognition, Facial Recognition, facies, female, Fixation, human, human experiment, Humans, Image analysis, Individual differences, male, Ocular, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, Psychophysics, recognition, Recognition (Psychology), regression analysis, task performance, unfamiliar face matching, viewpoint variation, Viewpoint variations, Visual, visual discrimination, visual evoked potential, visual information, visual memory, visual stimulation, visual system parameters, Young Adult
@article{royer_efficient_2016,
  title     = {Efficient visual information for unfamiliar face matching despite viewpoint variations: It's not in the eyes!},
  author    = {Royer, J. and Blais, C. and Barnabé-Lortie, V. and Carré, M. and Leclerc, J. and Fiset, D.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84968779426&doi=10.1016%2fj.visres.2016.04.004&partnerID=40&md5=4c63f6eea279f7322c9af23ae9ed22c1},
  doi       = {10.1016/j.visres.2016.04.004},
  issn      = {0042-6989},
  year      = {2016},
  date      = {2016-01-01},
  journal   = {Vision Research},
  volume    = {123},
  pages     = {33--40},
  abstract  = {Faces are encountered in highly diverse angles in real-world settings. Despite this considerable diversity, most individuals are able to easily recognize familiar faces. The vast majority of studies in the field of face recognition have nonetheless focused almost exclusively on frontal views of faces. Indeed, a number of authors have investigated the diagnostic facial features for the recognition of frontal views of faces previously encoded in this same view. However, the nature of the information useful for identity matching when the encoded face and test face differ in viewing angle remains mostly unexplored. The present study addresses this issue using individual differences and bubbles, a method that pinpoints the facial features effectively used in a visual categorization task. Our results indicate that the use of features located in the center of the face, the lower left portion of the nose area and the center of the mouth, are significantly associated with individual efficiency to generalize a face's identity across different viewpoints. However, as faces become more familiar, the reliance on this area decreases, while the diagnosticity of the eye region increases. This suggests that a certain distinction can be made between the visual mechanisms subtending viewpoint invariance and face recognition in the case of unfamiliar face identification. Our results further support the idea that the eye area may only come into play when the face stimulus is particularly familiar to the observer. © 2016 Elsevier Ltd.},
  note      = {Publisher: Elsevier Ltd},
  keywords  = {accuracy, adult, article, association, attention, Bubbles, Evoked Potentials, eye fixation, Face, face profile, face recognition, Facial Recognition, facies, female, Fixation, human, human experiment, Humans, Image analysis, Individual differences, male, Ocular, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, Psychophysics, recognition, Recognition (Psychology), regression analysis, task performance, unfamiliar face matching, viewpoint variation, Viewpoint variations, Visual, visual discrimination, visual evoked potential, visual information, visual memory, visual stimulation, visual system parameters, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Roy, C.; Blais, C.; Fiset, D.; Rainville, P.; Gosselin, F.
Efficient information for recognizing pain in facial expressions Journal Article
In: European Journal of Pain (United Kingdom), vol. 19, no. 6, pp. 852–860, 2015, ISSN: 10903801 (ISSN).
Abstract | Links | BibTeX | Tags: anger, article, association, Classification, Cues, disgust, emotion, Emotions, Facial Expression, Fear, female, happiness, human, human experiment, Humans, male, nociception, normal human, Pain, pain assessment, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, random sample, reproducibility, Reproducibility of Results, sadness, statistical significance, Visual, visual information, visual stimulation
@article{roy_efficient_2015,
  title     = {Efficient information for recognizing pain in facial expressions},
  author    = {Roy, C. and Blais, C. and Fiset, D. and Rainville, P. and Gosselin, F.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84929122739&doi=10.1002%2fejp.676&partnerID=40&md5=027f6da7b6d5c98c86de6a07766fb83d},
  doi       = {10.1002/ejp.676},
  issn      = {1090-3801},
  year      = {2015},
  date      = {2015-01-01},
  journal   = {European Journal of Pain (United Kingdom)},
  volume    = {19},
  number    = {6},
  pages     = {852--860},
  abstract  = {Background The face as a visual stimulus is a reliable source of information for judging the pain experienced by others. Until now, most studies investigating the facial expression of pain have used a descriptive method (i.e. Facial Action Coding System). However, the facial features that are relevant for the observer in the identification of the expression of pain remain largely unknown despite the strong medical impact that misjudging pain can have on patients' well-being. Methods Here, we investigated this question by applying the Bubbles method. Fifty healthy volunteers were asked to categorize facial expressions (the six basic emotions, pain and neutrality) displayed in stimuli obtained from a previously validated set and presented for 500 ms each. To determine the critical areas of the face used in this categorization task, the faces were partly masked based on random sampling of regions of the stimuli at different spatial frequency ranges. Results Results show that accurate pain discrimination relies mostly on the frown lines and the mouth. Finally, an ideal observer analysis indicated that the use of the frown lines in human observers could not be attributed to the objective 'informativeness' of this area. Conclusions Based on a recent study suggesting that this area codes for the affective dimension of pain, we propose that the visual system has evolved to focus primarily on the facial cues that signal the aversiveness of pain, consistent with the social role of facial expressions in the communication of potential threats. © 2015 European Pain Federation-EFIC®.},
  keywords  = {anger, article, association, Classification, Cues, disgust, emotion, Emotions, Facial Expression, Fear, female, happiness, human, human experiment, Humans, male, nociception, normal human, Pain, pain assessment, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, random sample, reproducibility, Reproducibility of Results, sadness, statistical significance, Visual, visual information, visual stimulation},
  pubstate  = {published},
  tppubtype = {article}
}
Blais, C.; Arguin, M.; Marleau, I.
Orientation invariance in visual shape perception Journal Article
In: Journal of Vision, vol. 9, no. 2, 2009, ISSN: 15347362.
Abstract | Links | BibTeX | Tags: adult, article, association, attention, Cues, Depth Perception, Form Perception, human, Humans, methodology, Orientation, Pattern Recognition, Photic Stimulation, photostimulation, physiology, Rotation, vision, Visual Perception, Young Adult
@article{blais_orientation_2009,
  title     = {Orientation invariance in visual shape perception},
  author    = {Blais, C. and Arguin, M. and Marleau, I.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-60649103374&doi=10.1167%2f9.2.14&partnerID=40&md5=1fb771ae6c96e3a0ad4e92a307a12d1d},
  doi       = {10.1167/9.2.14},
  issn      = {1534-7362},
  year      = {2009},
  date      = {2009-01-01},
  journal   = {Journal of Vision},
  volume    = {9},
  number    = {2},
  abstract  = {To assess directly the orientation-invariance of specific shape representation stages in humans, we examined whether rotation (on the image plane or in depth) modulates the conjunction and linear non-separability effects in a shape visual search task (M. Arguin & D. Saumier, 2000; D. Saumier & M. Arguin, 2003). A series of visual search experiments involving simple 2D or 3D shapes show that these target type effects are entirely resistant to both planar and depth rotations. It was found however, that resistance to depth rotation only occurred when the 3D shapes had a richly textured surface but not when they had a uniform surface, with shading as the only reliable depth cue. The results also indicate that both planar and depth rotations affected performance indexes not concerned with the target type effects (i.e. overall RTs and magnitude of display size and target presence effects). Overall, the present findings suggest that the shape representations subtending the conjunction and linear non-separability effects are invariant across both planar and depth rotations whereas other shape representation stages involved in the task are orientation-specific. © ARVO.},
  keywords  = {adult, article, association, attention, Cues, Depth Perception, Form Perception, human, Humans, methodology, Orientation, Pattern Recognition, Photic Stimulation, photostimulation, physiology, Rotation, vision, Visual Perception, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Fiset, D.; Blais, C.; Arguin, M.; Tadros, K.; Éthier-Majcher, C.; Bub, D.; Gosselin, F.
The spatio-temporal dynamics of visual letter recognition Journal Article
In: Cognitive Neuropsychology, vol. 26, no. 1, pp. 23–35, 2009, ISSN: 02643294.
Abstract | Links | BibTeX | Tags: article, bootstrapping, Bubbles technique, Discrimination Learning, human, Humans, Image analysis, linear regression analysis, methodology, Models, Nonlinear Dynamics, nonlinear system, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, Psychological, psychological model, reaction time, recognition, Recognition (Psychology), task performance, temporal summation, time, Time Factors, Visual, word recognition
@article{fiset_spatio-temporal_2009,
  title     = {The spatio-temporal dynamics of visual letter recognition},
  author    = {Fiset, D. and Blais, C. and Arguin, M. and Tadros, K. and Éthier-Majcher, C. and Bub, D. and Gosselin, F.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-67649525418&doi=10.1080%2f02643290802421160&partnerID=40&md5=bca7bda93d59994f2679faff9d93f46a},
  doi       = {10.1080/02643290802421160},
  issn      = {0264-3294},
  year      = {2009},
  date      = {2009-01-01},
  journal   = {Cognitive Neuropsychology},
  volume    = {26},
  number    = {1},
  pages     = {23--35},
  abstract  = {We applied the Bubbles technique to reveal directly the spatio-temporal features of uppercase Arial letter identification. We asked four normal readers to each identify 26,000 letters that were randomly sampled in space and time; afterwards, we performed multiple linear regressions on the participant's response accuracy and the space-time samples. We contend that each cluster of connected significant regression coefficients is a letter feature. To bridge the gap between the letter identification literature and this experiment, we also determined the relative importance of the features proposed in the letter identification literature. Results show clear modulations of the relative importance of the letter features of some letters across time, demonstrating that letter features are not always extracted simultaneously at constant speeds. Furthermore, of all the feature classes proposed in the literature, line terminations and horizontals appear to be the two most important for letter identification. © 2008 Psychology Press, an imprint of the Taylor & Francis Group.},
  keywords  = {article, bootstrapping, Bubbles technique, Discrimination Learning, human, Humans, Image analysis, linear regression analysis, methodology, Models, Nonlinear Dynamics, nonlinear system, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, Psychological, psychological model, reaction time, recognition, Recognition (Psychology), task performance, temporal summation, time, Time Factors, Visual, word recognition},
  pubstate  = {published},
  tppubtype = {article}
}
Jack, R. E.; Blais, C.; Scheepers, C.; Schyns, P. G.; Caldara, R.
Cultural Confusions Show that Facial Expressions Are Not Universal Journal Article
In: Current Biology, vol. 19, no. 18, pp. 1543–1548, 2009, ISSN: 09609822 (ISSN).
Abstract | Links | BibTeX | Tags: adult, article, confusion, Cross-Cultural Comparison, cultural anthropology, Cultural Characteristics, cultural factor, Culture, emotion, Emotions, ethnology, eye movement, Eye movements, Facial Expression, Far East, female, human, human relation, Humans, Interpersonal Relations, male, Photic Stimulation, photostimulation, recognition, Recognition (Psychology), SYSNEURO, Western World
@article{jack_cultural_2009,
  title     = {Cultural Confusions Show that Facial Expressions Are Not Universal},
  author    = {Jack, R. E. and Blais, C. and Scheepers, C. and Schyns, P. G. and Caldara, R.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-70349289081&doi=10.1016%2fj.cub.2009.07.051&partnerID=40&md5=aedea29c81d3dcc7498c634bf1044e53},
  doi       = {10.1016/j.cub.2009.07.051},
  issn      = {0960-9822},
  year      = {2009},
  date      = {2009-01-01},
  journal   = {Current Biology},
  volume    = {19},
  number    = {18},
  pages     = {1543--1548},
  abstract  = {Central to all human interaction is the mutual understanding of emotions, achieved primarily by a set of biologically rooted social signals evolved for this purpose-facial expressions of emotion. Although facial expressions are widely considered to be the universal language of emotion [1-3], some negative facial expressions consistently elicit lower recognition levels among Eastern compared to Western groups (see [4] for a meta-analysis and [5, 6] for review). Here, focusing on the decoding of facial expression signals, we merge behavioral and computational analyses with novel spatiotemporal analyses of eye movements, showing that Eastern observers use a culture-specific decoding strategy that is inadequate to reliably distinguish universal facial expressions of "fear" and "disgust." Rather than distributing their fixations evenly across the face as Westerners do, Eastern observers persistently fixate the eye region. Using a model information sampler, we demonstrate that by persistently fixating the eyes, Eastern observers sample ambiguous information, thus causing significant confusion. Our results question the universality of human facial expressions of emotion, highlighting their true complexity, with critical consequences for cross-cultural communication and globalization. © 2009 Elsevier Ltd. All rights reserved.},
  keywords  = {adult, article, confusion, Cross-Cultural Comparison, cultural anthropology, Cultural Characteristics, cultural factor, Culture, emotion, Emotions, ethnology, eye movement, Eye movements, Facial Expression, Far East, female, human, human relation, Humans, Interpersonal Relations, male, Photic Stimulation, photostimulation, recognition, Recognition (Psychology), SYSNEURO, Western World},
  pubstate  = {published},
  tppubtype = {article}
}