

de Recherche et d’Innovation
en Cybersécurité et Société
Côté, L.; Lamontagne, J.; Bellerose, A.; Blais, C.; Fiset, D.
The eyes are central to face detection: revisiting the foundations of face processing Journal Article
In: Vision Research, vol. 243, 2026, ISSN: 00426989 (ISSN).
Abstract | Links | BibTeX | Tags: adult, article, Black person, Bubbles, Categorization, Caucasian, Detection, emotion assessment, Faces, Facial Recognition, facies, female, human, human experiment, Image analysis, information processing, Information use, male, Noise, normal human, perception, Prosopagnosia, spatial frequency discrimination, task performance, visual discrimination, Young Adult
@article{cote_eyes_2026,
title = {The eyes are central to face detection: revisiting the foundations of face processing},
author = {L. Côté and J. Lamontagne and A. Bellerose and C. Blais and D. Fiset},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105030389147&doi=10.1016%2Fj.visres.2026.108785&partnerID=40&md5=752aa5d9923ac60539e36118ad41e1e6},
doi = {10.1016/j.visres.2026.108785},
issn = {0042-6989},
year = {2026},
date = {2026-01-01},
journal = {Vision Research},
volume = {243},
abstract = {Face detection feels effortless, yet it requires finely tuned computations to extract socially meaningful signals from the visual stream. Here, we used the Bubbles method to isolate the facial features and spatial frequency information that support face categorization. Across three experiments varying in task demands and visual context, the eye region consistently emerged as the most diagnostic source of information, particularly in high spatial frequencies. This finding held whether participants distinguished faces from noise, from non-face objects, or from real-world categories—suggesting that the eyes serve as an anchor point for categorization across contexts. Strikingly, this diagnostic profile mirrors that found in face identification tasks, implying that detection and recognition may rely on shared perceptual mechanisms rather than sequential, independent processes. This overlap sheds light on longstanding ambiguities in the prosopagnosia literature, indicating that detection impairments found in patients may stem from a broader failure to extract critical eye information. More broadly, our results invite a rethinking of the early stages of face processing, suggesting that detection already involves selective use of diagnostic facial features that supports recognition, emotional decoding, and social perception. © 2026 The Author(s).},
keywords = {adult, article, Black person, Bubbles, Categorization, Caucasian, Detection, emotion assessment, Faces, Facial Recognition, facies, female, human, human experiment, Image analysis, information processing, Information use, male, Noise, normal human, perception, Prosopagnosia, spatial frequency discrimination, task performance, visual discrimination, Young Adult},
pubstate = {published},
tppubtype = {article}
}
Onita, C. A.; Matei, D. -V.; Chelarasu, E.; Lupu, R. G.; Petrescu-Miron, D.; Visnevschi, A.; Vudu, S.; Corciova, C.; Fuior, R.; Tupita, N.; Bouchard, S.; Mocanu, V.
Virtual Reality Trier Social Stress and Virtual Supermarket Exposure: Electrocardiogram Correlates of Food Craving and Eating Traits in Adolescents Journal Article
In: Nutrients, vol. 17, no. 24, 2025, ISSN: 20726643 (ISSN).
Abstract | Links | BibTeX | Tags: acute stress, Adolescent, Adolescents, adult, article, controlled study, craving, decision making, Eating, eating behavior, ecological validity, electrocardiogram, electrocardiogram (ECG) parameters, Electrocardiography, feeding behavior, female, food craving, food preference, Food Preferences, Heart Rate, human, Humans, hyperphagia, male, mental stress, motivation, normal human, overnutrition, pathophysiology, Perceived Stress Scale, personalized nutrition, physiological stress, physiology, PQ interval, Psychological, psychology, QTc interval, questionnaire, reward, simulation, social stress, Stress, supermarket, Surveys and Questionnaires, three-factor eating questionnaire (TFEQ), Three-Factor-Eating-Questionnaire, Trier Social Stress Test, virtual reality, virtual supermarket, visual analog scale
@article{onita_virtual_2025,
title = {Virtual Reality {Trier} Social Stress and Virtual Supermarket Exposure: Electrocardiogram Correlates of Food Craving and Eating Traits in Adolescents},
author = {C. A. Onita and D.-V. Matei and E. Chelarasu and R. G. Lupu and D. Petrescu-Miron and A. Visnevschi and S. Vudu and C. Corciova and R. Fuior and N. Tupita and S. Bouchard and V. Mocanu},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105026068857&doi=10.3390%2Fnu17243924&partnerID=40&md5=fde16e892b1a18284dc51ac869ba8ee9},
doi = {10.3390/nu17243924},
issn = {2072-6643},
year = {2025},
date = {2025-01-01},
journal = {Nutrients},
volume = {17},
number = {24},
abstract = {Background/Objectives: Acute stress is known to influence food-related motivation and decision-making, often promoting a preference for energy-dense, palatable foods. However, traditional laboratory paradigms have limited ecological validity. This study examined the relationship between stress-induced physiological changes, eating behavior traits, and food cravings using a virtual reality (VR) adaptation of the Trier Social Stress Test (VR-TSST) followed by a VR supermarket task in adolescents. Methods: Thirty-eight adolescents (mean age 15.8 ± 0.6 years) participated in the study. Physiological parameters (HR, QT, PQ intervals) were recorded pre- and post-stress using a portable ECG device (WIWE). Perceived stress and eating behavior traits were evaluated with the Perceived Stress Scale (PSS) and the Three-Factor Eating Questionnaire (TFEQ-R21C), respectively. Immediately after the VR-TSST, participants performed a VR supermarket task in which they rated cravings for sweet, fatty, and healthy foods using visual analog scales (VAS). Paired-samples t-tests examined pre–post changes in physiological parameters, partial correlations explored associations between ECG responses and eating traits, and a 2 × 3 mixed-model Repeated Measures ANOVA assessed the effects of food type (sweet, fatty, healthy) and uncontrolled eating (UE) group (low vs. high) on post-stress cravings. Results: Acute stress induced significant increases in HR and QTc intervals (p < 0.01), confirming a robust physiological stress response. The ANOVA revealed a strong main effect of food type (F(1.93, 435.41) = 168.98, p < 0.001, η2p = 0.43), indicating that stress-induced cravings differed across food categories, with sweet foods rated highest. A significant food type × UE group interaction (F(1.93, 435.41) = 16.49, p < 0.001, η2p = 0.07) showed that adolescents with high UE exhibited greater cravings for sweet and fatty foods than those with low UE. Overall, craving levels did not differ significantly between groups. Conclusions: The findings demonstrate that acute stress selectively enhances cravings for high-reward foods, and that this effect is modulated by baseline uncontrolled eating tendencies. The combined use of VR-based stress induction and VR supermarket simulation offers an innovative, ecologically valid framework for studying stress-related eating behavior in adolescents, with potential implications for personalized nutrition and the prevention of stress-induced overeating. © 2025 by the authors.},
keywords = {acute stress, Adolescent, Adolescents, adult, article, controlled study, craving, decision making, Eating, eating behavior, ecological validity, electrocardiogram, electrocardiogram (ECG) parameters, Electrocardiography, feeding behavior, female, food craving, food preference, Food Preferences, Heart Rate, human, Humans, hyperphagia, male, mental stress, motivation, normal human, overnutrition, pathophysiology, Perceived Stress Scale, personalized nutrition, physiological stress, physiology, PQ interval, Psychological, psychology, QTc interval, questionnaire, reward, simulation, social stress, Stress, supermarket, Surveys and Questionnaires, three-factor eating questionnaire (TFEQ), Three-Factor-Eating-Questionnaire, Trier Social Stress Test, virtual reality, virtual supermarket, visual analog scale},
pubstate = {published},
tppubtype = {article}
}
Blais, C.; Fiset, D.; Côté, L.; Ledrou-Paquet, V.; Charbonneau, I.
Conducting online visual psychophysics experiments: A replication assessment of two face processing studies Journal Article
In: Vision Research, vol. 233, 2025, ISSN: 00426989 (ISSN), (Publisher: Elsevier Ltd).
Abstract | Links | BibTeX | Tags: adult, article, cultural factor, experiment, Facial Recognition, female, geography, human, human experiment, Humans, information processing, laboratory, male, normal human, online system, Photic Stimulation, photostimulation, physiology, procedures, psychology, Psychophysics, recognition, stimulus response, vision, visual stimulation, Young Adult
@article{blais_conducting_2025,
title = {Conducting online visual psychophysics experiments: A replication assessment of two face processing studies},
author = {C. Blais and D. Fiset and L. Côté and V. Ledrou-Paquet and I. Charbonneau},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105004807446&doi=10.1016%2fj.visres.2025.108617&partnerID=40&md5=771b056e57c4d7a34ff7c56ce39a4bd2},
doi = {10.1016/j.visres.2025.108617},
issn = {0042-6989},
year = {2025},
date = {2025-01-01},
journal = {Vision Research},
volume = {233},
publisher = {Elsevier Ltd},
abstract = {In vision sciences, researchers rigorously control the testing environment and the physical properties of stimuli, making it challenging to conduct visual perception experiments online. However, online research offers key advantages, including access to larger and more diverse participant samples, helping to address the problem of underpowered studies and to enhance the generalizability of results. In face recognition research, increasing diversity is essential, especially considering evidence that cultural and geographical factors influence basic visual face processing. The present study tested a new online platform, Pack & Go from VPixx Technologies, that supports experiments written in MATLAB and Python. Two face recognition experiments based on a data-driven psychophysical method involving real-time stimulus manipulation and relying on functions from the Psychtoolbox were tested. In Experiment 1, the visual information used for face recognition was compared across four conditions that gradually reduced experimental control over the testing environment and stimulus properties. In Experiment 2, the association between face recognition abilities and information utilization was measured online and compared to lab-based results. In both experiments, results obtained in the lab and online were highly similar, demonstrating the potential of online research for vision science. © 2025 The Author(s)},
keywords = {adult, article, cultural factor, experiment, Facial Recognition, female, geography, human, human experiment, Humans, information processing, laboratory, male, normal human, online system, Photic Stimulation, photostimulation, physiology, procedures, psychology, Psychophysics, recognition, stimulus response, vision, visual stimulation, Young Adult},
pubstate = {published},
tppubtype = {article}
}
Banville, F.; Milhomme, D.; Perron, A.; Pinard, J.; Houle, J.; Therrien, D.; Peguero-Rodriguez, G.; Charette, S.; Ménélas, B. -A.; Trépanier, M.; Bouchard, S.
Using Virtual Reality to Improve Nurses’ Students’ Clinical Surveillance in a Critical Care Context: A Psychological Perspective on Learning Journal Article
In: Annual Review of CyberTherapy and Telemedicine, vol. 21, pp. 245–251, 2023, ISSN: 15548716, (Publisher: Interactive Media Institute).
Abstract | Links | BibTeX | Tags: article, clinical monitoring, cognition, controlled study, cybersickness, female, human, human experiment, intensive care, intensive care unit, interview, male, normal human, nursing student, psychological aspect, qualitative analysis, qualitative research, recovery room, skill, virtual reality
@article{banville_using_2023,
title = {Using Virtual Reality to Improve Nurses’ Students’ Clinical Surveillance in a Critical Care Context: A Psychological Perspective on Learning},
author = {F. Banville and D. Milhomme and A. Perron and J. Pinard and J. Houle and D. Therrien and G. Peguero-Rodriguez and S. Charette and B.-A. Ménélas and M. Trépanier and S. Bouchard},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85182468511&partnerID=40&md5=65f6f32f45ade940105c06386edd7a1c},
issn = {1554-8716},
year = {2023},
date = {2023-01-01},
journal = {Annual Review of CyberTherapy and Telemedicine},
volume = {21},
pages = {245--251},
publisher = {Interactive Media Institute},
abstract = {Nurse’s clinical judgement is important to provide optimal and safe care, particularly in a critical care unit. Clinical surveillance is an activity that nurses use every day and which requires crucial components to manage patients' risk of complications. To carry out this process, several cognitive functions and psychological attitudes are needed such as information and attention processing, judgement, decision-making, stress, and anxiety regulation. Since 2018, Milhomme, Banville et al. have been working to develop a Virtual Care Unit (VCU), using immersive virtual reality, intended to train future nurses to improve their competence towards clinical surveillance process skills. The aim of this qualitative descriptive study was to determine the pertinence to use VCU simulation with graduating nurses’ students to improve clinical surveillance skills in a critical care context. Thirteen nursing students were recruited to test the scenario through the VCU. Participants were instructed to carry surveillance process on a specific patient who suffer of an instability after a surgery. An interview guide of 11 questions was used for the data collection. The results show there are 10 facilitating and 9 restricting factors in the VCU that may play a role in nursing students’ learning clinical surveillance processes. Among these elements, four of them have an important link with a psychological perspective: 1) sense of presence; 2) cybersickness; 3) reflexive environment; 4) stress reduction. Results show an important contribution of several cognitive function in the clinical surveillance process learning by the virtual reality technology. © 2023, Interactive Media Institute. All rights reserved.},
keywords = {article, clinical monitoring, cognition, controlled study, cybersickness, female, human, human experiment, intensive care, intensive care unit, interview, male, normal human, nursing student, psychological aspect, qualitative analysis, qualitative research, recovery room, skill, virtual reality},
pubstate = {published},
tppubtype = {article}
}
Plouffe-Demers, M. -P.; Saumure, C.; Fiset, D.; Cormier, S.; Blais, C.
Facial Expression of Pain: Sex Differences in the Discrimination of Varying Intensities Journal Article
In: Emotion, vol. 23, no. 5, pp. 1254–1266, 2022, ISSN: 15283542 (ISSN), (Publisher: American Psychological Association).
Abstract | Links | BibTeX | Tags: adult, article, controlled study, data-driven methods, effect size, Empathy, Facial Expression, facial expressions, female, human, human experiment, information processing, male, normal human, Pain, pain intensity, qualitative research, sample size, sex difference, sex differences, vision, visual acuity, visual information, Visual Perception
@article{plouffe-demers_facial_2022,
title = {Facial Expression of Pain: Sex Differences in the Discrimination of Varying Intensities},
author = {M.-P. Plouffe-Demers and C. Saumure and D. Fiset and S. Cormier and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85138214204&doi=10.1037%2femo0001156&partnerID=40&md5=d5063c7ab05722c16694952ac5d53027},
doi = {10.1037/emo0001156},
issn = {1528-3542},
year = {2022},
date = {2022-01-01},
journal = {Emotion},
volume = {23},
number = {5},
pages = {1254--1266},
publisher = {American Psychological Association},
abstract = {It has been proposed that women are better than men at recognizing emotions and pain experienced by others. They have also been shown to be more sensitive to variations in pain expressions. The objective of the present study was to explore the perceptual basis of these sexual differences by comparing the visual information used by men and women to discriminate between different intensities of pain facial expressions. Using the data-driven Bubbles method, we were able to corroborate the woman advantage in the discrimination of pain intensities that did not appear to be explained by variations in empathic tendencies. In terms of visual strategies, our results do not indicate any qualitative differences in the facial regions used by men and women. However, they suggest that women rely on larger regions of the face that seems to completely mediate their advantage. This utilization of larger clusters could indicate either that women integrate simultaneously and more efficiently information coming from different areas of the face or that they are more flexible in the utilization of the information present in these clusters. Women would then opt for a more holistic or flexible processing of the facial information, while men would rely on a specific yet rigid integration strategy. © 2022 American Psychological Association},
keywords = {adult, article, controlled study, data-driven methods, effect size, Empathy, Facial Expression, facial expressions, female, human, human experiment, information processing, male, normal human, Pain, pain intensity, qualitative research, sample size, sex difference, sex differences, vision, visual acuity, visual information, Visual Perception},
pubstate = {published},
tppubtype = {article}
}
Roy, C.; Blais, C.; Fiset, D.; Rainville, P.; Gosselin, F.
Efficient information for recognizing pain in facial expressions Journal Article
In: European Journal of Pain (United Kingdom), vol. 19, no. 6, pp. 852–860, 2015, ISSN: 10903801 (ISSN).
Abstract | Links | BibTeX | Tags: anger, article, association, Classification, Cues, disgust, emotion, Emotions, Facial Expression, Fear, female, happiness, human, human experiment, Humans, male, nociception, normal human, Pain, pain assessment, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, random sample, reproducibility, Reproducibility of Results, sadness, statistical significance, Visual, visual information, visual stimulation
@article{roy_efficient_2015,
title = {Efficient information for recognizing pain in facial expressions},
author = {C. Roy and C. Blais and D. Fiset and P. Rainville and F. Gosselin},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84929122739&doi=10.1002%2fejp.676&partnerID=40&md5=027f6da7b6d5c98c86de6a07766fb83d},
doi = {10.1002/ejp.676},
issn = {1090-3801},
year = {2015},
date = {2015-01-01},
journal = {European Journal of Pain},
volume = {19},
number = {6},
pages = {852--860},
abstract = {Background The face as a visual stimulus is a reliable source of information for judging the pain experienced by others. Until now, most studies investigating the facial expression of pain have used a descriptive method (i.e. Facial Action Coding System). However, the facial features that are relevant for the observer in the identification of the expression of pain remain largely unknown despite the strong medical impact that misjudging pain can have on patients' well-being. Methods Here, we investigated this question by applying the Bubbles method. Fifty healthy volunteers were asked to categorize facial expressions (the six basic emotions, pain and neutrality) displayed in stimuli obtained from a previously validated set and presented for 500 ms each. To determine the critical areas of the face used in this categorization task, the faces were partly masked based on random sampling of regions of the stimuli at different spatial frequency ranges. Results Results show that accurate pain discrimination relies mostly on the frown lines and the mouth. Finally, an ideal observer analysis indicated that the use of the frown lines in human observers could not be attributed to the objective 'informativeness' of this area. Conclusions Based on a recent study suggesting that this area codes for the affective dimension of pain, we propose that the visual system has evolved to focus primarily on the facial cues that signal the aversiveness of pain, consistent with the social role of facial expressions in the communication of potential threats. © 2015 European Pain Federation-EFIC®.},
keywords = {anger, article, association, Classification, Cues, disgust, emotion, Emotions, Facial Expression, Fear, female, happiness, human, human experiment, Humans, male, nociception, normal human, Pain, pain assessment, Pattern Recognition, Photic Stimulation, photostimulation, physiology, priority journal, procedures, random sample, reproducibility, Reproducibility of Results, sadness, statistical significance, Visual, visual information, visual stimulation},
pubstate = {published},
tppubtype = {article}
}
Dennis, E.; Rouleau, J. -L.; Renaud, P.; Nolet, K.; Saumur, C.
A pilot development of virtual stimuli depicting affective dispositions for penile plethysmography assessment of sex offenders Journal Article
In: Canadian Journal of Human Sexuality, vol. 23, no. 3, pp. 200–208, 2014, ISSN: 11884517 (ISSN), (Publisher: University of Toronto Press Inc.).
Abstract | Links | BibTeX | Tags: adult, Affect, affective disposition, article, assessment, computer program, Facial Expression, Fear, female, happiness, heterosexuality, human, human experiment, male, normal human, penile blood flow, penile plethysmography, pilot study, Plethysmography, sadness, sex offenders, sexual arousal, Sexual Behavior, sexual crime, sexual orientation, undergraduate student, virtual reality, Virtual stimuli
@article{dennis_pilot_2014,
title = {A pilot development of virtual stimuli depicting affective dispositions for penile plethysmography assessment of sex offenders},
author = {E. Dennis and J.-L. Rouleau and P. Renaud and K. Nolet and C. Saumur},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84938522255&doi=10.3138%2fcjhs.2529&partnerID=40&md5=43cf6631bb2cd619ca0d049ae3a3b093},
doi = {10.3138/cjhs.2529},
issn = {1188-4517},
year = {2014},
date = {2014-01-01},
journal = {Canadian Journal of Human Sexuality},
volume = {23},
number = {3},
pages = {200--208},
publisher = {University of Toronto Press Inc.},
abstract = {There are concerns regarding the reliability, realism, and validity of stimulus materials used in the assessment of sexual interests among sex offenders. This article explores new stimulus materials for use with penile plethysmography (PPG) assessments. First, this paper presents a pilot study where undergraduate students rated virtual characters (male and female) on perceived age. In addition, the materials developed are unique in that they depict the characters exhibiting varying affective dispositions, including neutral, fearful, sad, joyful, and seductive. Participants in the first study were also asked to identify the affective disposition of the virtual characters, and results suggest that affective disposition was largely perceived as intended, especially in terms of identifying the general emotional valence of the affective dispositions (i.e., positive versus negative). In a second pilot study, we used the computer-generated images to measure sexual arousal responses in a group of non-deviant males recruited in the community. Responses measured through penile plethysmography suggest participants responded to the stimuli as expected, as the greatest amount of sexual arousal was recorded when participants were shown the adult female character. In addition, participants responded with significant arousal only when the adult female character was depicted as sexually open (joyful or seductive), rather than sexually closed or neutral. Results suggest these materials may discriminate sexual interests if applied within clinical forensic assessment of sex offenders. © 2014 by the Sex Information and Education Council of Canada.},
keywords = {adult, Affect, affective disposition, article, assessment, computer program, Facial Expression, Fear, female, happiness, heterosexuality, human, human experiment, male, normal human, penile blood flow, penile plethysmography, pilot study, Plethysmography, sadness, sex offenders, sexual arousal, Sexual Behavior, sexual crime, sexual orientation, undergraduate student, virtual reality, Virtual stimuli},
pubstate = {published},
tppubtype = {article}
}
Joyal, C. C.; Jacob, L.; Cigna, M. -H.; Guay, J. -P.; Renaud, P.
Virtual faces expressing emotions: An initial concomitant and construct validity study Journal Article
In: Frontiers in Human Neuroscience, vol. 8, no. SEP, pp. 1–6, 2014, ISSN: 16625161, (Publisher: Frontiers Media S. A.).
Abstract | Links | BibTeX | Tags: adult, anger, article, computer program, construct validity, corrugator supercilii muscle, disgust, Electromyography, emotion, emotionality, face muscle, Facial Expression, Fear, female, gaze, happiness, human, human experiment, male, Middle Aged, muscle contraction, normal human, positive feedback, sadness, surprise, task performance, virtual reality, Young Adult, zygomatic major muscle
@article{joyal_virtual_2014,
title = {Virtual faces expressing emotions: An initial concomitant and construct validity study},
author = {C. C. Joyal and L. Jacob and M.-H. Cigna and J.-P. Guay and P. Renaud},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84933679803&doi=10.3389%2ffnhum.2014.00787&partnerID=40&md5=c51b26765fb1e2152cede99adcd519b0},
doi = {10.3389/fnhum.2014.00787},
issn = {1662-5161},
year = {2014},
date = {2014-01-01},
journal = {Frontiers in Human Neuroscience},
volume = {8},
pages = {1--6},
publisher = {Frontiers Media S. A.},
abstract = {Objectives: The goal of this study was to initially assess concomitants and construct validity of a newly developed set of virtual faces expressing six fundamental emotions (happiness, surprise, anger, sadness, fear, and disgust). Recognition rates, facial electromyography (zygomatic major and corrugator supercilii muscles), and regional gaze fixation latencies (eyes and mouth regions) were compared in 41 adult volunteers (20 ♂, 21 ♀) during the presentation of video clips depicting real vs. virtual adults expressing emotions. Background: Facial expressions of emotions represent classic stimuli for the studyofsocial cognition. Developing virtual dynamic facial expressions ofemotions, however, would open-up possibilities, both for fundamental and clinical research. For instance, virtual faces allow real-time Human–Computer retroactions between physiological measures and the virtual agent. Results: Emotions expressed by each set of stimuli were similarly recognized, both by men and women. Accordingly, both sets of stimuli elicited similar activation of facial muscles and similar ocular fixation times in eye regions from man and woman participants. Conclusion: Further validation studies can be performed with these virtual faces among clinical populations known to present social cognition difficulties. Brain–Computer Interface studies with feedback–feedforward interactions based on facial emotion expressions can also be conducted with these stimuli. © 2014 Joyal, Jacob, Cigna, Guay and Renaud.},
keywords = {adult, anger, article, computer program, construct validity, corrugator supercilii muscle, disgust, Electromyography, emotion, emotionality, face muscle, Facial Expression, Fear, female, gaze, happiness, human, human experiment, male, Middle Aged, muscle contraction, normal human, positive feedback, sadness, surprise, task performance, virtual reality, Young Adult, zygomatic major muscle},
pubstate = {published},
tppubtype = {article}
}
Blais, C.; Arguin, M.; Gosselin, F.
Human visual processing oscillates: Evidence from a classification image technique Journal Article
In: Cognition, vol. 128, no. 3, pp. 353–362, 2013, ISSN: 00100277.
Abstract | Links | BibTeX | Tags: amplitude modulation, article, Face, female, human, human experiment, Humans, male, normal human, oscillation, Oscillations, Pattern Recognition, Photic Stimulation, priority journal, reaction time, signal noise ratio, Signal-To-Noise Ratio, stimulus response, Temporal processing, vision, Visual, visual acuity, Visual Perception, Visual sampling, visual stimulation
@article{blais_human_2013,
title = {Human visual processing oscillates: Evidence from a classification image technique},
author = {C. Blais and M. Arguin and F. Gosselin},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84879014749&doi=10.1016%2fj.cognition.2013.04.009&partnerID=40&md5=c2d20982fa4a5c46b9d99d2912284ff6},
doi = {10.1016/j.cognition.2013.04.009},
issn = {0010-0277},
year = {2013},
date = {2013-01-01},
journal = {Cognition},
volume = {128},
number = {3},
pages = {353--362},
abstract = {Recent investigations have proposed that visual information may be sampled in a discrete manner, similarly to the snapshots of a camera, but this hypothesis remains controversial. Moreover, assuming a discrete sampling of information, the properties of this sampling-for instance, the frequency at which it operates, and how it synchronizes with the environment-still need to be clarified. We systematically modulated the signal-to-noise ratio of faces through time and examined how it impacted face identification performance. Altogether, our results support the hypothesis of discrete sampling. Furthermore, they suggest that this mechanism may operate at a rate of about 10-15. Hz and that it is synchronized with the onset of the stimulus. © 2013 Elsevier B.V.},
keywords = {amplitude modulation, article, Face, female, human, human experiment, Humans, male, normal human, oscillation, Oscillations, Pattern Recognition, Photic Stimulation, priority journal, reaction time, signal noise ratio, Signal-To-Noise Ratio, stimulus response, Temporal processing, vision, Visual, visual acuity, Visual Perception, Visual sampling, visual stimulation},
pubstate = {published},
tppubtype = {article}
}
Blais, C.; Roy, C.; Fiset, D.; Arguin, M.; Gosselin, F.
The eyes are not the window to basic emotions Journal Article
In: Neuropsychologia, vol. 50, no. 12, pp. 2830–2838, 2012, ISSN: 00283932.
Abstract | Links | BibTeX | Tags: adult, analytic method, article, association, association cortex, cognition, Cues, Discrimination (Psychology), discriminative stimulus, dynamic stimulus, emotion, Emotions, Eye, Facial Expression, female, Fixation, human, human experiment, Humans, male, Mouth, normal human, Ocular, Pattern Recognition, Photic Stimulation, static stimulus, task performance, Visual, visual discrimination, visual information, visual memory, visual system function, Young Adult
@article{blais_eyes_2012,
title = {The eyes are not the window to basic emotions},
author = {C. Blais and C. Roy and D. Fiset and M. Arguin and F. Gosselin},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84865829171&doi=10.1016%2fj.neuropsychologia.2012.08.010&partnerID=40&md5=8a46d347f96ea9bd94bd161b6f1e8b92},
doi = {10.1016/j.neuropsychologia.2012.08.010},
issn = {0028-3932},
year = {2012},
date = {2012-01-01},
journal = {Neuropsychologia},
volume = {50},
number = {12},
pages = {2830--2838},
abstract = {Facial expressions are one of the most important ways to communicate our emotional state. In popular culture and in the scientific literature on face processing, the eye area is often conceived as a very important - if not the most important - cue for the recognition of facial expressions. In support of this, an underutilization of the eye area is often observed in clinical populations with a deficit in the recognition of facial expressions of emotions. Here, we used the Bubbles technique to verify which facial cue is the most important when it comes to discriminating between eight static and dynamic facial expressions (i.e., six basic emotions, pain and a neutral expression). We found that the mouth area is the most important cue for both static and dynamic facial expressions. We conducted an ideal observer analysis on the static expressions and determined that the mouth area is the most informative. However, we found an underutilization of the eye area by human participants in comparison to the ideal observer. We then demonstrated that the mouth area contains the most discriminative motions across expressions. We propose that the greater utilization of the mouth area by the human participants might come from remnants of the strategy the brain has developed with dynamic stimuli, and/or from a strategy whereby the most informative area is prioritized due to the limited capacity of the visuo-cognitive system. © 2012 Elsevier Ltd.},
keywords = {adult, analytic method, article, association, association cortex, cognition, Cues, Discrimination (Psychology), discriminative stimulus, dynamic stimulus, emotion, Emotions, Eye, Facial Expression, female, Fixation, human, human experiment, Humans, male, Mouth, normal human, Ocular, Pattern Recognition, Photic Stimulation, static stimulus, task performance, Visual, visual discrimination, visual information, visual memory, visual system function, Young Adult},
pubstate = {published},
tppubtype = {article}
}



