

de Recherche et d’Innovation
en Cybersécurité et Société
Joyal, C. C.; Jacob, L.; Cigna, M.-H.; Guay, J.-P.; Renaud, P.
Virtual faces expressing emotions: An initial concomitant and construct validity study Article de journal
Dans: Frontiers in Human Neuroscience, vol. 8, no SEP, p. 1–6, 2014, ISSN: 16625161, (Publisher: Frontiers Media S. A.).
Résumé | Liens | BibTeX | Étiquettes: adult, anger, article, computer program, construct validity, corrugator supercilii muscle, disgust, Electromyography, emotion, emotionality, face muscle, Facial Expression, Fear, female, gaze, happiness, human, human experiment, male, Middle Aged, muscle contraction, normal human, positive feedback, sadness, surprise, task performance, virtual reality, Young Adult, zygomatic major muscle
@article{joyal_virtual_2014,
  title    = {Virtual faces expressing emotions: An initial concomitant and construct validity study},
  author   = {Joyal, C. C. and Jacob, L. and Cigna, M.-H. and Guay, J.-P. and Renaud, P.},
  url      = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84933679803&doi=10.3389%2ffnhum.2014.00787&partnerID=40&md5=c51b26765fb1e2152cede99adcd519b0},
  doi      = {10.3389/fnhum.2014.00787},
  issn     = {1662-5161},
  year     = {2014},
  date     = {2014-01-01},
  journal  = {Frontiers in Human Neuroscience},
  volume   = {8},
  number   = {SEP},
  pages    = {1--6},
  abstract = {Objectives: The goal of this study was to initially assess concomitants and construct validity of a newly developed set of virtual faces expressing six fundamental emotions (happiness, surprise, anger, sadness, fear, and disgust). Recognition rates, facial electromyography (zygomatic major and corrugator supercilii muscles), and regional gaze fixation latencies (eyes and mouth regions) were compared in 41 adult volunteers (20 ♂, 21 ♀) during the presentation of video clips depicting real vs. virtual adults expressing emotions. Background: Facial expressions of emotions represent classic stimuli for the study of social cognition. Developing virtual dynamic facial expressions of emotions, however, would open up possibilities, both for fundamental and clinical research. For instance, virtual faces allow real-time Human–Computer retroactions between physiological measures and the virtual agent. Results: Emotions expressed by each set of stimuli were similarly recognized, both by men and women. Accordingly, both sets of stimuli elicited similar activation of facial muscles and similar ocular fixation times in eye regions from man and woman participants. Conclusion: Further validation studies can be performed with these virtual faces among clinical populations known to present social cognition difficulties. Brain–Computer Interface studies with feedback–feedforward interactions based on facial emotion expressions can also be conducted with these stimuli. © 2014 Joyal, Jacob, Cigna, Guay and Renaud.},
  note     = {Publisher: Frontiers Media S. A.},
  keywords = {adult, anger, article, computer program, construct validity, corrugator supercilii muscle, disgust, Electromyography, emotion, emotionality, face muscle, Facial Expression, Fear, female, gaze, happiness, human, human experiment, male, Middle Aged, muscle contraction, normal human, positive feedback, sadness, surprise, task performance, virtual reality, Young Adult, zygomatic major muscle},
  pubstate = {published},
  tppubtype = {article}
}
Renaud, P.; Chartier, S.; Albert, G.; Décarie, J.; Cournoyer, L.-G.; Bouchard, S.
Presence as determined by fractal perceptual-motor dynamics Article de journal
Dans: Cyberpsychology and Behavior, vol. 10, no 1, p. 122–130, 2007, ISSN: 10949313.
Résumé | Liens | BibTeX | Étiquettes: adult, article, eye movement, Eye movements, female, gaze, human, Humans, immersion, male, mathematical computing, motor performance, perceptual motor dynamics, Psychomotor Performance, simulation, Social Environment, Social Perception, standard, three dimensional imaging, User-Computer Interface, virtual reality modeling language, Visual Perception
@article{renaud_presence_2007,
  title    = {Presence as determined by fractal perceptual-motor dynamics},
  author   = {Renaud, P. and Chartier, S. and Albert, G. and Décarie, J. and Cournoyer, L.-G. and Bouchard, S.},
  url      = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-33847713017&doi=10.1089%2fcpb.2006.9983&partnerID=40&md5=c1c6df654279a13b1553e9cfbf43acd0},
  doi      = {10.1089/cpb.2006.9983},
  issn     = {1094-9313},
  year     = {2007},
  date     = {2007-01-01},
  journal  = {Cyberpsychology and Behavior},
  volume   = {10},
  number   = {1},
  pages    = {122--130},
  abstract = {This paper presents a tentative model of the role of perceptual-motor dynamics in the emergence of the feeling of presence. A new method allowing the measure of how gaze probes three-dimensional space in immersion is used to support this model. Fractal computations of gaze behavior are shown to be more effective than standard computations of eye movements in predicting presence. © Mary Ann Liebert, Inc.},
  keywords = {adult, article, eye movement, Eye movements, female, gaze, human, Humans, immersion, male, mathematical computing, motor performance, perceptual motor dynamics, Psychomotor Performance, simulation, Social Environment, Social Perception, standard, three dimensional imaging, User-Computer Interface, virtual reality modeling language, Visual Perception},
  pubstate = {published},
  tppubtype = {article}
}
Albert, G.; Renaud, P.; Chartier, S.; Renaud, L.; Sauvé, L.; Bouchard, S.
Scene perception, gaze behavior, and perceptual learning in virtual environments Article de journal
Dans: Cyberpsychology and Behavior, vol. 8, no 6, p. 592–600, 2005, ISSN: 10949313.
Résumé | Liens | BibTeX | Étiquettes: adult, article, behavior, controlled study, eye fixation, eye movement, female, Fixation, gaze, human, human experiment, Humans, immersion, Learning, male, mental performance, normal human, Ocular, perceptual learning, qualitative validity, User-Computer Interface, validation process, virtual reality, Visual Perception
@article{albert_scene_2005,
  title    = {Scene perception, gaze behavior, and perceptual learning in virtual environments},
  author   = {Albert, G. and Renaud, P. and Chartier, S. and Renaud, L. and Sauvé, L. and Bouchard, S.},
  url      = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-29444459017&doi=10.1089%2fcpb.2005.8.592&partnerID=40&md5=87150bf324f101229007a48f91517702},
  doi      = {10.1089/cpb.2005.8.592},
  issn     = {1094-9313},
  year     = {2005},
  date     = {2005-01-01},
  journal  = {Cyberpsychology and Behavior},
  volume   = {8},
  number   = {6},
  pages    = {592--600},
  abstract = {More and more immersive environments are developed to provide support for learning or training purposes. Ecological validity of such environments is usually based on learning performance comparisons between virtual environments and their genuine counterparts. Little is known about learning processes occurring in immersive environments. A new technique is proposed for testing perceptual learning during virtual immersion. This methodology relies upon eye-tracking technologies to analyze gaze behavior recorded in relation to virtual objects' features and tasks' requirements. It is proposed that perceptual learning mechanisms engaged could be detected through eye movements. In this study, nine subjects performed perceptual learning tasks in virtual immersion. Results obtained indicated that perceptual learning influences gaze behavior dynamics. More precisely, analysis revealed that fixation number and variability in fixation duration varied with perceptual learning level. Such findings could contribute in shedding light on learning mechanisms as well as providing additional support for validating virtual learning environments. © Mary Ann Liebert, Inc.},
  keywords = {adult, article, behavior, controlled study, eye fixation, eye movement, female, Fixation, gaze, human, human experiment, Humans, immersion, Learning, male, mental performance, normal human, Ocular, perceptual learning, qualitative validity, User-Computer Interface, validation process, virtual reality, Visual Perception},
  pubstate = {published},
  tppubtype = {article}
}