

de Recherche et d’Innovation
en Cybersécurité et Société
Blais, C.; Fiset, D.; Furumoto-Deshaies, H.; Kunz, M.; Seuss, D.; Cormier, S.
Facial Features Underlying the Decoding of Pain Expressions Journal Article
In: Journal of Pain, vol. 20, no. 6, pp. 728–738, 2019, ISSN: 15265900 (ISSN), (Publisher: Churchill Livingstone Inc.).
Abstract | Links | BibTeX | Tags: adult, article, attention, decoding, Eye, Facial Expression, facies, female, human, human experiment, Humans, lip, male, Memory, Nose, Pain, pain assessment, pain dimensions, pain measurement, sensory analysis, wrinkle, Young Adult
@article{blais_facial_2019,
  title     = {Facial Features Underlying the Decoding of Pain Expressions},
  author    = {Blais, C. and Fiset, D. and Furumoto-Deshaies, H. and Kunz, M. and Seuss, D. and Cormier, S.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85060707205&doi=10.1016%2fj.jpain.2019.01.002&partnerID=40&md5=2d2dd305de430a7ce8973644f57a4996},
  doi       = {10.1016/j.jpain.2019.01.002},
  issn      = {1526-5900},
  year      = {2019},
  date      = {2019-01-01},
  journal   = {Journal of Pain},
  volume    = {20},
  number    = {6},
  pages     = {728--738},
  abstract  = {Previous research has revealed that the face is a finely tuned medium for pain communication. Studies assessing the decoding of facial expressions of pain have revealed an interesting discrepancy, namely that, despite eyes narrowing being the most frequent facial expression accompanying pain, individuals mostly rely on brow lowering and nose wrinkling/upper lip raising to evaluate pain. The present study verifies if this discrepancy may reflect an interaction between the features coding pain expressions and the features used by observers and stored in their mental representations. Experiment 1 shows that more weight is allocated to the brow lowering and nose wrinkling/upper lip raising, supporting the idea that these features are allocated more importance when mental representations of pain expressions are stored in memory. These 2 features have been associated with negative valence and with the affective dimension of pain, whereas the eyes narrowing feature has been associated more closely with the sensory dimension of pain. However, experiment 2 shows that these 2 features remain more salient than eyes narrowing, even when attention is specifically directed toward the sensory dimension of pain. Together, these results suggest that the features most saliently coded in the mental representation of facial expressions of pain may reflect a bias toward allocating more weight to the affective information encoded in the face. Perspective: This work reveals the relative importance of 3 facial features representing the core of pain expressions during pain decoding. The results show that 2 features are over-represented; this finding may potentially be linked with the estimation biases occurring when clinicians and lay persons evaluate pain based on facial appearance. © 2019 the American Pain Society},
  note      = {Publisher: Churchill Livingstone Inc.},
  keywords  = {adult, article, attention, decoding, Eye, Facial Expression, facies, female, human, human experiment, Humans, lip, male, Memory, Nose, Pain, pain assessment, pain dimensions, pain measurement, sensory analysis, wrinkle, Young Adult},
  pubstate  = {published},
  tppubtype = {article}
}
Blais, C.; Jack, R. E.; Scheepers, C.; Fiset, D.; Caldara, R.
Culture shapes how we look at faces Journal Article
In: PLoS ONE, vol. 3, no. 8, 2008, ISSN: 19326203 (ISSN).
Abstract | Links | BibTeX | Tags: adult, article, Asian, Asian Continental Ancestry Group, Caucasian, Classification, Cross-Cultural Comparison, cultural anthropology, cultural factor, Culture, East Asian, European Continental Ancestry Group, Eye, eye fixation, eye movement, Eye movements, Face, face asymmetry, face recognition, female, Fixation, histology, human, human experiment, Humans, Learning, male, methodology, Mouth, normal human, Nose, observer variation, Ocular, physiology, race difference, recognition, Recognition (Psychology), vision, visual memory, Visual Perception
@article{blais_culture_2008,
  title     = {Culture shapes how we look at faces},
  author    = {Blais, C. and Jack, R. E. and Scheepers, C. and Fiset, D. and Caldara, R.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-51549087752&doi=10.1371%2fjournal.pone.0003022&partnerID=40&md5=e75dcf9792dbd03fd1ef5894b81bfc4f},
  doi       = {10.1371/journal.pone.0003022},
  issn      = {1932-6203},
  year      = {2008},
  date      = {2008-01-01},
  journal   = {PLoS ONE},
  volume    = {3},
  number    = {8},
  pages     = {e3022},
  abstract  = {Background: Face processing, amongst many basic visual skills, is thought to be invariant across all humans. From as early as 1965, studies of eye movements have consistently revealed a systematic triangular sequence of fixations over the eyes and the mouth, suggesting that faces elicit a universal, biologically-determined information extraction pattern. Methodology/Principal Findings: Here we monitored the eye movements of Western Caucasian and East Asian observers while they learned, recognized, and categorized by race Western Caucasian and East Asian faces. Western Caucasian observers reproduced a scattered triangular pattern of fixations for faces of both races and across tasks. Contrary to intuition, East Asian observers focused more on the central region of the face. Conclusions/Significance: These results demonstrate that face processing can no longer be considered as arising from a universal series of perceptual events. The strategy employed to extract visual information from faces differs across cultures. © 2008 Blais et al.},
  keywords  = {adult, article, Asian, Asian Continental Ancestry Group, Caucasian, Classification, Cross-Cultural Comparison, cultural anthropology, cultural factor, Culture, East Asian, European Continental Ancestry Group, Eye, eye fixation, eye movement, Eye movements, Face, face asymmetry, face recognition, female, Fixation, histology, human, human experiment, Humans, Learning, male, methodology, Mouth, normal human, Nose, observer variation, Ocular, physiology, race difference, recognition, Recognition (Psychology), vision, visual memory, Visual Perception},
  pubstate  = {published},
  tppubtype = {article}
}