

Bogie, B. J. M.; Noël, C.; Gu, F.; Nadeau, S.; Shvetz, C.; Khan, H.; Rivard, M. -C.; Bouchard, S.; Lepage, M.; Guimond, S.
Using virtual reality to improve verbal episodic memory in schizophrenia: A proof-of-concept trial Journal Article
In: Schizophrenia Research: Cognition, vol. 36, 2024, ISSN: 2215-0013, (Publisher: Elsevier Inc.).
@article{bogie_using_2024,
title = {Using virtual reality to improve verbal episodic memory in schizophrenia: A proof-of-concept trial},
author = {B. J. M. Bogie and C. Noël and F. Gu and S. Nadeau and C. Shvetz and H. Khan and M. -C. Rivard and S. Bouchard and M. Lepage and S. Guimond},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85186986986&doi=10.1016%2fj.scog.2024.100305&partnerID=40&md5=a15c598b45b8f44a40b25fe5fd078a06},
doi = {10.1016/j.scog.2024.100305},
issn = {2215-0013},
year = {2024},
date = {2024-01-01},
journal = {Schizophrenia Research: Cognition},
volume = {36},
abstract = {Background: Schizophrenia is associated with impairments in verbal episodic memory. Strategy for Semantic Association Memory (SESAME) training represents a promising cognitive remediation program to improve verbal episodic memory. Virtual reality (VR) may be a novel tool to increase the ecological validity and transfer of learned skills of traditional cognitive remediation programs. The present proof-of-concept study aimed to assess the feasibility, acceptability, and preliminary efficacy of a VR-based cognitive remediation module inspired by SESAME principles to improve the use of verbal episodic memory strategies in schizophrenia. Methods: Thirty individuals with schizophrenia/schizoaffective disorder completed this study. Participants were randomized to either a VR-based verbal episodic memory training condition inspired by SESAME principles (intervention group) or an active control condition (control group). In the training condition, a coach taught semantic encoding strategies (active rehearsal and semantic clustering) to help participants remember restaurant orders in VR. In the active control condition, participants completed visuospatial puzzles in VR. Attrition rate, participant experience ratings, and cybersickness questionnaires were used to assess feasibility and acceptability. Trial 1 of the Hopkins Verbal Learning Test – Revised was administered pre- and post-intervention to assess preliminary efficacy. Results: Feasibility was demonstrated by a low attrition rate (5.88 %), and acceptability was demonstrated by limited cybersickness and high levels of enjoyment. Although the increase in the number of semantic clusters used following the module did not reach conventional levels of statistical significance in the intervention group, it demonstrated a notable trend with a medium effect size (t = 1.48},
note = {Publisher: Elsevier Inc.},
keywords = {adult, article, clinical article, clinical assessment, Cognitive remediation therapy, cybersickness, disease severity, dizziness, Ecological treatment, Episodic memory, exclusion VR criteria questionnaire, feasibility study, female, Hopkins verbal learning test, human, male, mini international neuropsychiatric interview, nausea, outcome assessment, Positive and Negative Syndrome Scale, Proof of concept, questionnaire, randomized controlled trial, schizophrenia, scoring system, Semantic encoding, Semantics, task performance, training, Verbal memory, virtual reality, vr experience questionnaire},
pubstate = {published},
tppubtype = {article}
}
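A note on the outcome described above: semantic clustering in list-learning tasks such as the HVLT-R is often scored by counting adjacent pairs of recalled words that share a semantic category. The short Python sketch below illustrates that general idea only; the function, the category map, and the word list are hypothetical and do not reproduce the scoring system used in the trial.

def semantic_cluster_count(recalled_words, category_of):
    """Count adjacent pairs of recalled words from the same semantic category.

    One common way to quantify semantic clustering in free recall; the trial's
    exact scoring rules are not reproduced here.
    """
    pairs = zip(recalled_words, recalled_words[1:])
    return sum(
        1 for a, b in pairs
        if category_of.get(a) is not None and category_of.get(a) == category_of.get(b)
    )

# Hypothetical example: two animal words recalled back to back count as one cluster pair.
categories = {"tiger": "animal", "lion": "animal", "opal": "gem", "hotel": "dwelling"}
print(semantic_cluster_count(["tiger", "lion", "hotel", "opal"], categories))  # -> 1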
Cipolletta, S.; Tomaino, S.; Bouchard, S.; Berthiaume, M.; Manzoni, G.
Validation of the Italian Version of the Telepresence in Videoconference Scale (TVS) in a Sample of Psychologists and Psychotherapists During the COVID-19 Pandemic Journal Article
In: Clinical Psychology and Psychotherapy, vol. 31, no. 3, 2024, ISSN: 1063-3995, (Publisher: John Wiley and Sons Ltd).
@article{cipolletta_validation_2024,
title = {Validation of the Italian Version of the Telepresence in Videoconference Scale (TVS) in a Sample of Psychologists and Psychotherapists During the COVID-19 Pandemic},
author = {S. Cipolletta and S. Tomaino and S. Bouchard and M. Berthiaume and G. Manzoni},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85195533802&doi=10.1002%2fcpp.3015&partnerID=40&md5=255de2597dc6747136150f331ae970ab},
doi = {10.1002/cpp.3015},
issn = {1063-3995},
year = {2024},
date = {2024-01-01},
journal = {Clinical Psychology and Psychotherapy},
volume = {31},
number = {3},
abstract = {Objective: Telepresence may play a fundamental role in establishing authentic interactions and relationships in online psychological interventions and can be measured by the Telepresence in Videoconference Scale (TVS), which was validated only with patients to date. This post hoc study aimed to validate the Italian version of the TVS with mental health professionals. Method: The Italian TVS was included in an online survey, whose primary aim was to assess the experiences of Italian psychologists and psychotherapists with online interventions during the first wave of the COVID-19 pandemic and was filled in by 296 participants (83.4% females, mean age = 42 years old). Results: Exploratory factor analysis supported the original factor structure only partially because the scale ‘Absorption’ (i.e., the feeling of losing track of time), as it was formulated, did not measure telepresence. Correlations were also explored between the TVS scales and some survey items pertaining to intimacy and emotional closeness to patients, comfort and positive as well as negative experiences with online interventions. Conclusion: The TVS may be a useful tool to measure physical and social telepresence in online interventions, both in patients and in professionals. © 2024 John Wiley & Sons Ltd.},
note = {Publisher: John Wiley and Sons Ltd},
keywords = {adult, coronavirus disease 2019, COVID-19, female, human, Humans, Italy, male, Middle Aged, online psychological interventions, pandemic, Pandemics, procedures, psychology, Psychometrics, psychometry, psychotherapist, Psychotherapists, psychotherapy, questionnaire, reproducibility, Reproducibility of Results, SARS-CoV-2, Severe acute respiratory syndrome coronavirus 2, Surveys and Questionnaires, Telemedicine, telepresence, Validation, videoconference, videoconferencing},
pubstate = {published},
tppubtype = {article}
}
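The validation above rests on an exploratory factor analysis of the TVS items. The Python sketch below shows how such an EFA can be run with the factor_analyzer package, assuming three factors (physical presence, social presence, absorption) and simulated Likert-type responses; it is only an illustration under those assumptions, not the authors' analysis or data.

import numpy as np
import pandas as pd
from factor_analyzer import FactorAnalyzer

# Simulated responses: 296 participants x 15 hypothetical TVS items (1-5 Likert scale).
rng = np.random.default_rng(2)
items = pd.DataFrame(
    rng.integers(1, 6, size=(296, 15)),
    columns=[f"tvs_{i + 1}" for i in range(15)],
)

# Oblique rotation, three factors assumed from the original TVS structure.
efa = FactorAnalyzer(n_factors=3, rotation="oblimin")
efa.fit(items)
print(pd.DataFrame(efa.loadings_, index=items.columns).round(2))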
Pétrin, R.; Bérubé, A.; St-Pierre, É.; Blais, C.
Maternal childhood emotional abuse increases cardiovascular responses to children’s emotional facial expressions Journal Article
In: PLoS ONE, vol. 19, no. 5 May, 2024, ISSN: 1932-6203, (Publisher: Public Library of Science).
@article{petrin_maternal_2024,
title = {Maternal childhood emotional abuse increases cardiovascular responses to children’s emotional facial expressions},
author = {R. Pétrin and A. Bérubé and É. St-Pierre and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85192637581&doi=10.1371%2fjournal.pone.0302782&partnerID=40&md5=c464b30fe7cc5b7b0baaf865fdf1f6de},
doi = {10.1371/journal.pone.0302782},
issn = {1932-6203},
year = {2024},
date = {2024-01-01},
journal = {PLoS ONE},
volume = {19},
number = {5 May},
abstract = {Parents with a history of childhood maltreatment may be more likely to respond inadequately to their child’s emotional cues, such as crying or screaming, due to previous exposure to prolonged stress. While studies have investigated parents’ physiological reactions to their children’s vocal expressions of emotions, less attention has been given to their responses when perceiving children’s facial expressions of emotions. The present study aimed to determine if viewing facial expressions of emotions in children induces cardiovascular changes in mothers (hypo- or hyper-arousal) and whether these differ as a function of childhood maltreatment. A total of 104 mothers took part in this study. Their experiences of childhood maltreatment were measured using the Childhood Trauma Questionnaire (CTQ). Participants’ electrocardiogram signals were recorded during a task in which they viewed a landscape video (baseline) and images of children’s faces expressing different intensities of emotion. Heart rate variability (HRV) was extracted from the recordings as an indicator of parasympathetic reactivity. Participants presented two profiles: one group of mothers had a decreased HRV when presented with images of children’s facial expressions of emotions, while the other group’s HRV increased. However, HRV change was not significantly different between the two groups. The interaction between HRV groups and the severity of maltreatment experienced was marginal. Results suggested that experiences of childhood emotional abuse were more common in mothers whose HRV increased during the task. Therefore, more severe childhood experiences of emotional abuse could be associated with mothers’ cardiovascular hyperreactivity. Maladaptive cardiovascular responses could have a ripple effect, influencing how mothers react to their children’s facial expressions of emotions. That reaction could affect the quality of their interaction with their child. Providing interventions that help parents regulate their physiological and behavioral responses to stress might be helpful, especially if they have experienced childhood maltreatment. © 2024 Public Library of Science. All rights reserved.},
note = {Publisher: Public Library of Science},
keywords = {adult, alcohol consumption, analysis of variance, article, blood pressure, cardiovascular response, Child, Child Abuse, Childhood, Childhood Trauma Questionnaire, demographics, electrocardiogram, Electrocardiography, emotion, Emotional Abuse, Emotions, Ethnicity, Facial Expression, female, Heart Rate, heart rate variability, human, human experiment, Humans, Likert scale, male, mother, mother child relation, Mother-Child Relations, Mothers, parasympathetic tone, physical activity, physiology, post hoc analysis, psychology, questionnaire, sexual abuse, Surveys and Questionnaires},
pubstate = {published},
tppubtype = {article}
}
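The study above indexes parasympathetic reactivity through heart rate variability (HRV) extracted from ECG, comparing a baseline video with an emotional-faces task. The sketch below computes RMSSD, one common vagally mediated HRV metric, from R-peak times; the metric choice and the toy values are assumptions, since the exact HRV pipeline is not specified in this entry.

import numpy as np

def rmssd(r_peak_times_s):
    """Root mean square of successive differences (in ms) from R-peak times (in s)."""
    rr_ms = np.diff(np.asarray(r_peak_times_s, dtype=float)) * 1000.0  # R-R intervals in ms
    return float(np.sqrt(np.mean(np.diff(rr_ms) ** 2)))

# Hypothetical reactivity score: HRV during the faces task minus HRV at baseline.
baseline_peaks = [0.00, 0.82, 1.65, 2.49, 3.31, 4.15, 4.97]  # made-up R-peak times (s)
task_peaks = [0.00, 0.78, 1.60, 2.38, 3.20, 3.97, 4.78]
print(f"HRV change (task - baseline): {rmssd(task_peaks) - rmssd(baseline_peaks):.1f} ms")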
Sheehy, L.; Bouchard, S.; Kakkar, A.; El Hakim, R.; Lhoest, J.; Frank, A.
Development and Initial Testing of an Artificial Intelligence-Based Virtual Reality Companion for People Living with Dementia in Long-Term Care Journal Article
In: Journal of Clinical Medicine, vol. 13, no. 18, 2024, ISSN: 2077-0383, (Publisher: Multidisciplinary Digital Publishing Institute (MDPI)).
@article{sheehy_development_2024,
title = {Development and Initial Testing of an Artificial Intelligence-Based Virtual Reality Companion for People Living with Dementia in Long-Term Care},
author = {L. Sheehy and S. Bouchard and A. Kakkar and R. {El Hakim} and J. Lhoest and A. Frank},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85205071099&doi=10.3390%2fjcm13185574&partnerID=40&md5=844732ff858a0d5feb0a95a54093ad4d},
doi = {10.3390/jcm13185574},
issn = {2077-0383},
year = {2024},
date = {2024-01-01},
journal = {Journal of Clinical Medicine},
volume = {13},
number = {18},
abstract = {Background/Objectives: Feelings of loneliness are common in people living with dementia (PLWD) in long-term care (LTC). The goals of this study were to describe the development of a novel virtual companion for PLWD living in LTC and assess its feasibility and acceptability. Methods: The computer-generated virtual companion, presented using a head-mounted virtual reality display, was developed in two stages. In Stage 1, the virtual companion asked questions designed to encourage conversation and reminiscence. In Stage 2, more powerful artificial intelligence tools allowed the virtual companion to engage users in nuanced discussions on any topic. PLWD in LTC tested the application at each stage to assess feasibility and acceptability. Results: Ten PLWD living in LTC participated in Stage 1 (4 men and 6 women; average 82 years old) and Stage 2 (2 men and 8 women; average 87 years old). Session lengths ranged from 0:00 to 5:30 min in Stage 1 and 0:00 to 53:50 min in Stage 2. Speech recognition issues and a limited repertoire of questions limited acceptance in Stage 1. Enhanced conversational ability in Stage 2 led to intimate and meaningful conversations with many participants. Many users found the head-mounted display heavy. There were no complaints of simulator sickness. The virtual companion was best suited to PLWD who could engage in reciprocal conversation. After Stage 2, response latency was identified as an opportunity for improvement in future versions. Conclusions: Virtual reality and artificial intelligence can be used to create a virtual companion that is acceptable and enjoyable to some PLWD living in LTC. Ongoing innovations in hardware and software will allow future iterations to provide more natural conversational interaction and an enhanced social experience. © 2024 by the authors.},
note = {Publisher: Multidisciplinary Digital Publishing Institute (MDPI)},
keywords = {aged, article, Artificial intelligence, cognitive decline, cognitive impairment, compassion, conversation, dementia, Elderly, female, human, large language models, long term care, long-term care, major clinical study, male, program acceptability, program feasibility, reaction time, reminiscence, speech discrimination, very elderly, virtual reality},
pubstate = {published},
tppubtype = {article}
}
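The Stage 2 companion described above chains speech recognition, a large-language-model dialogue step, and spoken output, with response latency flagged as the main point to improve. The sketch below outlines such a turn-taking loop in Python with deliberately unimplemented placeholders for speech-to-text, the model call, and text-to-speech; every name and the prompt text are hypothetical and do not reflect the authors' implementation.

import time

SYSTEM_PROMPT = (
    "You are a warm, patient companion for a person living with dementia. "
    "Keep replies short, concrete, and open to reminiscence."
)

def transcribe_audio(audio_chunk) -> str:
    """Placeholder for a speech-to-text engine (hypothetical)."""
    raise NotImplementedError

def generate_reply(messages: list[dict]) -> str:
    """Placeholder for a large-language-model call (hypothetical)."""
    raise NotImplementedError

def speak(text: str) -> None:
    """Placeholder for text-to-speech playback in the headset (hypothetical)."""
    raise NotImplementedError

def conversation_turn(audio_chunk, history: list[dict]) -> list[dict]:
    """One user turn: transcribe, generate a reply, speak it, and log latency."""
    t0 = time.monotonic()
    history.append({"role": "user", "content": transcribe_audio(audio_chunk)})
    reply = generate_reply([{"role": "system", "content": SYSTEM_PROMPT}] + history)
    history.append({"role": "assistant", "content": reply})
    speak(reply)
    print(f"turn latency: {time.monotonic() - t0:.2f} s")  # the usability factor the study highlights
    return history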
Bérubé, A.; Pétrin, R.; Blais, C.
Parental depression moderates the relationship between childhood maltreatment and the recognition of children expressions of emotions Journal Article
In: Frontiers in Psychiatry, vol. 15, 2024, ISSN: 1664-0640, (Publisher: Frontiers Media SA).
@article{berube_parental_2024,
title = {Parental depression moderates the relationship between childhood maltreatment and the recognition of children expressions of emotions},
author = {A. Bérubé and R. Pétrin and C. Blais},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85196266525&doi=10.3389%2ffpsyt.2024.1374872&partnerID=40&md5=ce03a1c39e709fc0f2c773d4f82f3a10},
doi = {10.3389/fpsyt.2024.1374872},
issn = {1664-0640},
year = {2024},
date = {2024-01-01},
journal = {Frontiers in Psychiatry},
volume = {15},
abstract = {Background: Sensitivity plays a crucial role in parenting as it involves the ability to perceive and respond appropriately to children’s signals. Childhood maltreatment and depression can negatively impact adults’ ability to recognize emotions, but it is unclear which of these factors has a greater impact or how they interact. This knowledge is central to developing efficient, targeted interventions. This paper examines the interaction between parents’ depressive symptoms and childhood maltreatment and its influence on their ability to recognize the five basic emotions (happiness, anger, sadness, fear, and disgust) in children’s faces. Method: The sample consisted of 52 parents. Depressive symptoms were measured by the depression subscale of the Brief Symptom Inventory-18 (BSI-18), and maltreatment history was assessed by the Childhood Trauma Questionnaire (CTQ). Children’s emotional stimuli were morphed images created using The Child Affective Facial Expression (CAFE) database. Results: Our findings indicate that depressive symptoms moderate the relationship between parents’ history of childhood maltreatment and emotion recognition skills. Parents with higher depressive symptoms had lower emotion recognition accuracy when they had not experienced maltreatment. When childhood maltreatment was severe, emotion recognition skills were more consistent across all levels of depression. The relationship between depression and emotion recognition was primarily linked to recognizing sadness in children’s faces. Conclusion: These findings highlight how different experiences can affect parental abilities in emotion recognition and emphasize the need for interventions tailored to individual profiles to improve their effectiveness. Copyright © 2024 Bérubé, Pétrin and Blais.},
note = {Publisher: Frontiers Media SA},
keywords = {adult, anger, article, Beck Depression Inventory, Child, Child Abuse, child parent relation, childhood maltreatment, Childhood Trauma Questionnaire, Depression, disease severity, disgust, educational status, emotion, Emotion Recognition, Facial Expression, female, happiness, human, income, major clinical study, male, parent-child relationship, parental sensitivity, preschool child, questionnaire, recognition, sadness},
pubstate = {published},
tppubtype = {article}
}
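The moderation effect reported above (depressive symptoms moderating the link between maternal childhood maltreatment and emotion-recognition accuracy) is the kind of effect usually tested as an interaction term in a regression. The sketch below runs such a test with statsmodels on simulated data; the variable names and values are hypothetical and do not reproduce the study's analysis.

import numpy as np
import pandas as pd
import statsmodels.formula.api as smf

# Simulated data: one row per parent (names and values are hypothetical).
rng = np.random.default_rng(0)
n = 52
df = pd.DataFrame({
    "ctq_total": rng.normal(40, 10, n),       # childhood maltreatment severity
    "bsi_depression": rng.normal(5, 3, n),    # depressive symptoms
})
df["recognition_acc"] = 0.80 - 0.002 * df["ctq_total"] + rng.normal(0, 0.05, n)

# 'ctq_total * bsi_depression' expands to both main effects plus their interaction,
# which is the moderation term of interest.
model = smf.ols("recognition_acc ~ ctq_total * bsi_depression", data=df).fit()
print(model.summary())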
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.
Predicting the Arousal and Valence Values of Emotional States Using Learned, Predesigned, and Deep Visual Features † Journal Article
In: Sensors, vol. 24, no. 13, 2024, ISSN: 1424-8220, (Publisher: Multidisciplinary Digital Publishing Institute (MDPI)).
@article{joudeh_predicting_2024,
title = {Predicting the Arousal and Valence Values of Emotional States Using Learned, Predesigned, and Deep Visual Features †},
author = {I. O. Joudeh and A. -M. Cretu and S. Bouchard},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85198382238&doi=10.3390%2fs24134398&partnerID=40&md5=cefa8b2e2c044d02f99662af350007db},
doi = {10.3390/s24134398},
issn = {1424-8220},
year = {2024},
date = {2024-01-01},
journal = {Sensors},
volume = {24},
number = {13},
abstract = {The cognitive state of a person can be categorized using the circumplex model of emotional states, a continuous model of two dimensions: arousal and valence. The purpose of this research is to select a machine learning model(s) to be integrated into a virtual reality (VR) system that runs cognitive remediation exercises for people with mental health disorders. As such, the prediction of emotional states is essential to customize treatments for those individuals. We exploit the Remote Collaborative and Affective Interactions (RECOLA) database to predict arousal and valence values using machine learning techniques. RECOLA includes audio, video, and physiological recordings of interactions between human participants. To allow learners to focus on the most relevant data, features are extracted from raw data. Such features can be predesigned, learned, or extracted implicitly using deep learners. Our previous work on video recordings focused on predesigned and learned visual features. In this paper, we extend our work onto deep visual features. Our deep visual features are extracted using the MobileNet-v2 convolutional neural network (CNN) that we previously trained on RECOLA’s video frames of full/half faces. As the final purpose of our work is to integrate our solution into a practical VR application using head-mounted displays, we experimented with half faces as a proof of concept. The extracted deep features were then used to predict arousal and valence values via optimizable ensemble regression. We also fused the extracted visual features with the predesigned visual features and predicted arousal and valence values using the combined feature set. In an attempt to enhance our prediction performance, we further fused the predictions of the optimizable ensemble model with the predictions of the MobileNet-v2 model. After decision fusion, we achieved a root mean squared error (RMSE) of 0.1140, a Pearson’s correlation coefficient (PCC) of 0.8000, and a concordance correlation coefficient (CCC) of 0.7868 on arousal predictions. We achieved an RMSE of 0.0790, a PCC of 0.7904, and a CCC of 0.7645 on valence predictions. © 2024 by the authors.},
note = {Publisher: Multidisciplinary Digital Publishing Institute (MDPI)},
keywords = {adult, Affective interaction, Arousal, artificial neural network, Cognitive state, Cognitive/emotional state, Collaborative interaction, computer, Convolutional neural networks, correlation coefficient, Deep learning, emotion, Emotional state, Emotions, female, Forecasting, Helmet mounted displays, human, Humans, Learning algorithms, Learning systems, Long short-term memory, Machine learning, Machine-learning, male, Mean square error, Neural networks, physiology, Regression, Root mean squared errors, Video recording, virtual reality, Visual feature, visual features},
pubstate = {published},
tppubtype = {article}
}
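The evaluation above reports RMSE, Pearson's correlation, and the concordance correlation coefficient (CCC) after fusing ensemble-regression and MobileNet-v2 predictions. The sketch below implements RMSE and CCC and applies a simple averaging fusion to toy numbers; the averaging rule and the values are assumptions, not the paper's exact fusion scheme or results.

import numpy as np

def rmse(y_true, y_pred):
    y_true, y_pred = np.asarray(y_true, float), np.asarray(y_pred, float)
    return float(np.sqrt(np.mean((y_true - y_pred) ** 2)))

def ccc(y_true, y_pred):
    """Lin's concordance correlation coefficient."""
    y_true, y_pred = np.asarray(y_true, float), np.asarray(y_pred, float)
    mx, my = y_true.mean(), y_pred.mean()
    cov = np.mean((y_true - mx) * (y_pred - my))
    return float(2 * cov / (y_true.var() + y_pred.var() + (mx - my) ** 2))

# Toy decision fusion: average the two models' arousal predictions
# (a simple stand-in for the fusion step described in the abstract).
arousal_true = np.array([0.10, 0.30, -0.20, 0.40, 0.00])
pred_ensemble = np.array([0.12, 0.25, -0.15, 0.35, 0.05])
pred_cnn = np.array([0.08, 0.33, -0.25, 0.42, -0.02])
fused = (pred_ensemble + pred_cnn) / 2
print(f"RMSE={rmse(arousal_true, fused):.4f}, CCC={ccc(arousal_true, fused):.4f}")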
Banville, F.; Milhomme, D.; Perron, A.; Pinard, J.; Houle, J.; Therrien, D.; Peguero-Rodriguez, G.; Charette, S.; Ménélas, B. -A.; Trépanier, M.; Bouchard, S.
Using Virtual Reality to Improve Nurses’ Students’ Clinical Surveillance in a Critical Care Context: A Psychological Perspective on Learning Journal Article
In: Annual Review of CyberTherapy and Telemedicine, vol. 21, pp. 245–251, 2023, ISSN: 1554-8716, (Publisher: Interactive Media Institute).
@article{banville_using_2023,
title = {Using Virtual Reality to Improve Nurses’ Students’ Clinical Surveillance in a Critical Care Context: A Psychological Perspective on Learning},
author = {F. Banville and D. Milhomme and A. Perron and J. Pinard and J. Houle and D. Therrien and G. Peguero-Rodriguez and S. Charette and B. -A. Ménélas and M. Trépanier and S. Bouchard},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85182468511&partnerID=40&md5=65f6f32f45ade940105c06386edd7a1c},
issn = {1554-8716},
year = {2023},
date = {2023-01-01},
journal = {Annual Review of CyberTherapy and Telemedicine},
volume = {21},
pages = {245–251},
abstract = {Nurse’s clinical judgement is important to provide optimal and safe care, particularly in a critical care unit. Clinical surveillance is an activity that nurses use every day and which requires crucial components to manage patients' risk of complications. To carry out this process, several cognitive functions and psychological attitudes are needed such as information and attention processing, judgement, decision-making, stress, and anxiety regulation. Since 2018, Milhomme, Banville et al. have been working to develop a Virtual Care Unit (VCU), using immersive virtual reality, intended to train future nurses to improve their competence towards clinical surveillance process skills. The aim of this qualitative descriptive study was to determine the pertinence to use VCU simulation with graduating nurses’ students to improve clinical surveillance skills in a critical care context. Thirteen nursing students were recruited to test the scenario through the VCU. Participants were instructed to carry surveillance process on a specific patient who suffer of an instability after a surgery. An interview guide of 11 questions was used for the data collection. The results show there are 10 facilitating and 9 restricting factors in the VCU that may play a role in nursing students’ learning clinical surveillance processes. Among these elements, four of them have an important link with a psychological perspective: 1) sense of presence; 2) cybersickness; 3) reflexive environment; 4) stress reduction. Results show an important contribution of several cognitive function in the clinical surveillance process learning by the virtual reality technology. © 2023, Interactive Media Institute. All rights reserved.},
note = {Publisher: Interactive Media Institute},
keywords = {article, clinical monitoring, cognition, controlled study, cybersickness, female, human, human experiment, intensive care, intensive care unit, interview, male, normal human, nursing student, psychological aspect, qualitative analysis, qualitative research, recovery room, skill, virtual reality},
pubstate = {published},
tppubtype = {article}
}
Maïano, C.; Morin, A. J. S.; Gagnon, C.; Olivier, E.; Tracey, D.; Craven, R. G.; Bouchard, S.
Validation of an Adapted Version of the Glasgow Anxiety Scale for People with Intellectual Disabilities (GAS-ID) Journal Article
In: Journal of Autism and Developmental Disorders, vol. 53, no. 4, pp. 1560–1572, 2023, ISSN: 0162-3257, (Publisher: Springer).
@article{maiano_validation_2023,
title = {Validation of an Adapted Version of the Glasgow Anxiety Scale for People with Intellectual Disabilities (GAS-ID)},
author = {C. Maïano and A. J. S. Morin and C. Gagnon and E. Olivier and D. Tracey and R. G. Craven and S. Bouchard},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85125069450&doi=10.1007%2fs10803-021-05398-7&partnerID=40&md5=7347eb15e719941ce5eca046eb7f4564},
doi = {10.1007/s10803-021-05398-7},
issn = {0162-3257},
year = {2023},
date = {2023-01-01},
journal = {Journal of Autism and Developmental Disorders},
volume = {53},
number = {4},
pages = {1560–1572},
abstract = {The objective of the study was to validate adapted versions of the Glasgow Anxiety Scale for people with Intellectual Disabilities (GAS-ID) simultaneously developed in English and French. A sample of 361 youth with mild to moderate intellectual disability (ID) (M = 15.78 years) from Australia (English-speaking) and Canada (French-speaking) participated in this study. The results supported the factor validity and reliability, measurement invariance (between English and French versions), a lack of differential items functioning (as a function of youth’s age and ID level, but not sex in the English-Australian sample), temporal stability (over one year interval), and convergent validity (with global self-esteem and school loneliness) of a bi-factor exploratory structural equation modeling representation of the GAS-ID. The present study supports the psychometric properties of the English-Australian and French-Canadian versions of the adapted GAS-ID. © 2022, The Author(s), under exclusive licence to Springer Science+Business Media, LLC, part of Springer Nature.},
note = {Publisher: Springer},
keywords = {Adolescent, adult, Anxiety, anxiety assessment, article, Australia, autism, Autism Spectrum Disorder, Canada, Child, confirmatory factor analysis, controlled study, convergent validity, emotion assessment, English (language), exploratory structural equation modeling, female, French (language), glasgow anxiety scale, human, Humans, instrument validation, Intellectual Disability, intellectual impairment, intelligence quotient, loneliness, major clinical study, male, Psychometrics, psychometry, reliability, reproducibility, Reproducibility of Results, school child, school loneliness scale, self description questionnaire 1, self esteem, self report, self-concept assessment, statistical analysis, validity, Young Adult},
pubstate = {published},
tppubtype = {article}
}
Joudeh, I. O.; Cretu, A. -M.; Bouchard, S.; Guimond, S.
Prediction of Emotional States from Partial Facial Features for Virtual Reality Applications Journal Article
In: Annual Review of CyberTherapy and Telemedicine, vol. 21, pp. 17–21, 2023, ISSN: 1554-8716, (Publisher: Interactive Media Institute).
@article{joudeh_prediction_2023-1,
title = {Prediction of Emotional States from Partial Facial Features for Virtual Reality Applications},
author = {I. O. Joudeh and A. -M. Cretu and S. Bouchard and S. Guimond},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85182471413&partnerID=40&md5=8190e0dbb5b48ae508515f4029b0a0d1},
issn = {1554-8716},
year = {2023},
date = {2023-01-01},
journal = {Annual Review of CyberTherapy and Telemedicine},
volume = {21},
pages = {17–21},
abstract = {The availability of virtual reality (VR) in numerous clinical contexts has been made possible by recent technological advancements. One application is using VR for cognitive interventions with individuals who have mental disorders. Predicting the emotional states of users could help to prevent their discouragement during VR interventions. We can monitor the emotional states of individuals using sensors like an external camera, as they engage in various tasks within VR environments. The emotional state of VR users can be measured through arousal and valence, as per the Circumplex model. We used the Remote Collaborative and Affective Interactions (RECOLA) database of emotional behaviours. We processed video frames from 18 RECOLA videos. Due to the headset in VR systems, we detected faces and cropped the images of faces to use the lower half of the face only. We labeled the images with arousal and valence values to reflect various emotions. Convolutional neural networks (CNNs), specifically MobileNet-v2 and ResNets-18, were then used to predict arousal and valence values. MobileNet-v2 outperforms ResNet-18 as well as others from the literature. We achieved a root mean squared error (RMSE), Pearson’s correlation coefficient (PCC), and Concordance correlation coefficient (CCC) of 0.1495, 0.6387, and 0.6081 for arousal, and 0.0996, 0.6453, and 0.6232 for valence. Our work acts as a proof-of-concept for predicting emotional states from arousal and valence values via visual data of users immersed in VR experiences. In the future, predicted emotions could be used to automatically adjust the VR environment for individuals engaged in cognitive interventions. © 2023, Interactive Media Institute. All rights reserved.},
note = {Publisher: Interactive Media Institute},
keywords = {Arousal, article, clinical article, convolutional neural network, correlation coefficient, data base, emotion, facies, female, human, human experiment, Image processing, long short term memory network, male, random forest, residual neural network, root mean squared error, videorecording, virtual reality},
pubstate = {published},
tppubtype = {article}
}
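The preprocessing described above detects faces in RECOLA frames and keeps only the lower half, approximating what a camera sees when the upper face is occluded by a VR headset. The OpenCV sketch below illustrates that step; the Haar-cascade detector, file handling, and 224x224 resize are illustrative assumptions rather than the authors' exact tooling.

import cv2

# Stock Haar-cascade face detector bundled with OpenCV (an illustrative choice;
# the paper does not state which detector was used).
detector = cv2.CascadeClassifier(
    cv2.data.haarcascades + "haarcascade_frontalface_default.xml"
)

def lower_half_face(frame_bgr):
    """Detect the first face and return its lower half (mouth/chin region)."""
    gray = cv2.cvtColor(frame_bgr, cv2.COLOR_BGR2GRAY)
    faces = detector.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)
    if len(faces) == 0:
        return None
    x, y, w, h = faces[0]
    return frame_bgr[y + h // 2 : y + h, x : x + w]

# Hypothetical usage:
# frame = cv2.imread("recola_frame.png")
# crop = lower_half_face(frame)
# if crop is not None:
#     crop = cv2.resize(crop, (224, 224))  # a typical CNN input size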
Monthuy-Blanc, J.; Faghihi, U.; Fardshad, M. N. G.; Corno, G.; Iceta, S.; St-Pierre, M. -J.; Bouchard, S.
When Eating Intuitively Is Not Always a Positive Response: Using Machine Learning to Better Unravel Eaters Profiles Journal Article
In: Journal of Clinical Medicine, vol. 12, no. 16, 2023, ISSN: 2077-0383, (Publisher: Multidisciplinary Digital Publishing Institute (MDPI)).
@article{monthuy-blanc_when_2023,
title = {When Eating Intuitively Is Not Always a Positive Response: Using Machine Learning to Better Unravel Eaters Profiles},
author = {J. Monthuy-Blanc and U. Faghihi and M. N. G. Fardshad and G. Corno and S. Iceta and M. -J. St-Pierre and S. Bouchard},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85169079324&doi=10.3390%2fjcm12165172&partnerID=40&md5=2241ae85a40c73e19f30c5c3d10b514a},
doi = {10.3390/jcm12165172},
issn = {2077-0383},
year = {2023},
date = {2023-01-01},
journal = {Journal of Clinical Medicine},
volume = {12},
number = {16},
abstract = {Background: The aim of the present study was to identify eaters profiles using the latest advantages of Machine Learning approach to cluster analysis. Methods: A total of 317 participants completed an online-based survey including self-reported measures of body image dissatisfaction, bulimia, restraint, and intuitive eating. Analyses were conducted in two steps: (a) identifying an optimal number of clusters, and (b) validating the clustering model of eaters profile using a procedure inspired by the Causal Reasoning approach. Results: This study reveals a 7-cluster model of eaters profiles. The characteristics, needs, and strengths of each eater profile are discussed along with the presentation of a continuum of eaters profiles. Conclusions: This conceptualization of eaters profiles could guide the direction of health education and treatment interventions targeting perceptual and eating dimensions. © 2023 by the authors.},
note = {Publisher: Multidisciplinary Digital Publishing Institute (MDPI)},
keywords = {adult, aged, article, body dissatisfaction, bulimia, causal reasoning, Cluster Analysis, controlled study, coronavirus disease 2019, feeding behavior, female, health survey, human, intuitive eating, Machine learning, major clinical study, male, online analysis, pandemic, self report},
pubstate = {published},
tppubtype = {article}
}
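Step (a) of the analysis above, identifying an optimal number of clusters, can be illustrated with a plain k-means search scored by silhouette values, as in the sketch below on simulated survey scores; the features, the range of k, and the criterion are assumptions and do not reproduce the authors' Machine Learning and Causal Reasoning procedure.

import numpy as np
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_score
from sklearn.preprocessing import StandardScaler

# Simulated feature matrix: 317 respondents x 4 self-report scores
# (e.g., body dissatisfaction, bulimia, restraint, intuitive eating).
rng = np.random.default_rng(1)
X = StandardScaler().fit_transform(rng.normal(size=(317, 4)))

scores = {}
for k in range(2, 11):
    labels = KMeans(n_clusters=k, n_init=10, random_state=1).fit_predict(X)
    scores[k] = silhouette_score(X, labels)

best_k = max(scores, key=scores.get)
print(f"silhouette-preferred number of clusters: {best_k}")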