Maldonado, Alberto Melchor; Daher, Salam
Tracking Eye Gaze of Trainees in Virtual Patient Simulations Conference
Great Minds in STEM, 2020.
Duplicate of the tagged melchor2020tracking1 entry later in this file. The leading at-sign has been removed so BibTeX ignores this copy and the citation key is defined only once; all content is preserved verbatim below for reference.
conference{melchor2020tracking1,
title = {Tracking Eye Gaze of Trainees in Virtual Patient Simulations},
author = {Alberto Melchor Maldonado and Salam Daher},
url = {https://drive.google.com/file/d/13v1dc9uCqxCpLpdEI34p-2zUshqO1ucL/view?usp=sharing},
year = {2020},
date = {2020-10-01},
urldate = {2020-10-01},
booktitle = {Great Minds in STEM},
abstract = {It is important to detect where exactly medical trainees are looking. There are various regions a trainee may focus on, therefore eye-tracking on trainees may improve training. I created a unity package to record eye movement during simulations.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Sanchez, Vivian; Daher, Salam
Automating Virtual Patients Responses for Medical Simulation Conference
Great Minds in STEM, 2020.
Duplicate of the tagged sanchez2020automating entry later in this file. The leading at-sign has been removed so BibTeX ignores this copy and the citation key is defined only once; all content is preserved verbatim below for reference.
conference{sanchez2020automating,
title = {Automating Virtual Patients Responses for Medical Simulation},
author = {Vivian Sanchez and Salam Daher},
url = {https://drive.google.com/file/d/1wE61kpo1ljqZksb6jb4r4xfILhb7J0AA/view?usp=sharing},
year = {2020},
date = {2020-10-01},
urldate = {2020-10-01},
booktitle = {Great Minds in STEM},
abstract = {Voice assistants are not being used enough in healthcare. Health students train on simulators that requires members of faculty to control patient responses. This takes up resources and limits the students to certain training times. We created an Alexa skill for a stroke patient scenario, connected it to a 3D character, and explored the capabilities and limitations of the Amazon Alexa. Healthcare students can then interact with the Alexa patient without the faculty feeding the answers to the patient. By supplementing existing simulations with an automated way to respond while still providing controlled answers can allow more doctors and nurses to practice without waiting for an available instructor.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Maldonado, Alberto Melchor; Daher, Salam
Tracking Eye Gaze in Unity3D for Training. Conference
Great Minds in STEM, 2020.
Duplicate of the tagged melchor2020tracking2 entry later in this file. The leading at-sign has been removed so BibTeX ignores this copy and the citation key is defined only once; all content is preserved verbatim below for reference.
conference{melchor2020tracking2,
title = {Tracking Eye Gaze in Unity3D for Training.},
author = {Alberto Melchor Maldonado and Salam Daher},
year = {2020},
date = {2020-10-01},
urldate = {2020-10-01},
booktitle = {Great Minds in STEM},
pages = {5},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Daher, Salam; Gonzalez, Laura
Collaborations with Nursing Experts for Virtual Agent Research Conference
ACM Intelligent Virtual Agent, Glasgow, UK Virtual, 2020.
Duplicate of the tagged daher2020collaborations entry later in this file. The leading at-sign has been removed so BibTeX ignores this copy and the citation key is defined only once; all content is preserved verbatim below for reference.
conference{daher2020collaborations,
title = {Collaborations with Nursing Experts for Virtual Agent Research},
author = {Salam Daher and Laura Gonzalez},
url = {PDF available upon request.},
year = {2020},
date = {2020-10-01},
booktitle = {ACM Intelligent Virtual Agent},
address = {Glasgow, UK Virtual},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Daher, Salam; Hochreiter, Jason; Schubert, Ryan; Gonzalez, Laura; Cendan, Juan; Anderson, Mindi; Diaz, Desiree; Welch, Gregory
Physical-Virtual Patient: A new patient simulator Journal Article
In: Society of Simulation in Healthcare Journal, vol. 15, iss. 2, pp. 115-121, 2020.
Duplicate of the tagged daher2020physical entry later in this file. The leading at-sign has been removed so BibTeX ignores this copy and the citation key is defined only once; all content is preserved verbatim below for reference.
article{daher2020physical,
title = {Physical-Virtual Patient: A new patient simulator},
author = {Salam Daher and Jason Hochreiter and Ryan Schubert and Laura Gonzalez and Juan Cendan and Mindi Anderson and Desiree Diaz and Gregory Welch },
url = {https://journals.lww.com/simulationinhealthcare/fulltext/2020/04000/the_physical_virtual_patient_simulator__a_physical.9.aspx},
doi = {10.1097/SIH.0000000000000409},
year = {2020},
date = {2020-04-01},
urldate = {2020-04-01},
journal = {Society of Simulation in Healthcare Journal},
volume = {15},
issue = {2},
pages = {115-121},
abstract = {Introduction: We introduce a new type of patient simulator referred to as the Physical-Virtual Patient Simulator (PVPS). The PVPS combines the tangible characteristics of a human-shaped physical form with the flexibility and richness of a virtual patient. The PVPS can exhibit a range of multisensory cues, including visual cues (eg, capillary refill, facial expressions, appearance changes), auditory cues (eg, verbal responses, heart sounds), and tactile cues (eg, localized temperature, pulse).
Methods: We describe the implementation of the technology, technical testing with healthcare experts, and an institutional review board–approved pilot experiment involving 22 nurse practitioner students interacting with a simulated child in 2 scenarios: sepsis and child abuse. The nurse practitioners were asked qualitative questions about ease of use and the cues they noticed.
Results: Participants found it easy to interact with the PVPS and had mixed but encouraging responses regarding realism. In the sepsis scenario, participants reported the following cues leading to their diagnoses: temperature, voice, mottled skin, attitude and facial expressions, breathing and cough, vitals and oxygen saturation, and appearance of the mouth and tongue. For the child abuse scenario, they reported the skin appearance on the arms and abdomen, perceived attitude, facial expressions, and inconsistent stories.
Conclusions: We are encouraged by the initial results and user feedback regarding the perceived realism of visual (eg, mottling), audio (eg, breathing sounds), and tactile (eg, temperature) cues displayed by the PVPS, and ease of interaction with the simulator.(Sim Healthcare 15:115–121, 2020)},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Methods: We describe the implementation of the technology, technical testing with healthcare experts, and an institutional review board–approved pilot experiment involving 22 nurse practitioner students interacting with a simulated child in 2 scenarios: sepsis and child abuse. The nurse practitioners were asked qualitative questions about ease of use and the cues they noticed.
Results: Participants found it easy to interact with the PVPS and had mixed but encouraging responses regarding realism. In the sepsis scenario, participants reported the following cues leading to their diagnoses: temperature, voice, mottled skin, attitude and facial expressions, breathing and cough, vitals and oxygen saturation, and appearance of the mouth and tongue. For the child abuse scenario, they reported the skin appearance on the arms and abdomen, perceived attitude, facial expressions, and inconsistent stories.
Conclusions: We are encouraged by the initial results and user feedback regarding the perceived realism of visual (eg, mottling), audio (eg, breathing sounds), and tactile (eg, temperature) cues displayed by the PVPS, and ease of interaction with the simulator.(Sim Healthcare 15:115–121, 2020)
2020
Alberto Melchor Maldonado, Salam Daher
Tracking Eye Gaze of Trainees in Virtual Patient Simulations Conference
Great Minds in STEM, 2020.
Abstract | Links | BibTeX | Tags: 2020, Alberto Melchor Maldonado, Salam Daher
@conference{melchor2020tracking1,
  author    = {Alberto Melchor Maldonado and Salam Daher},
  title     = {Tracking Eye Gaze of Trainees in Virtual Patient Simulations},
  booktitle = {Great Minds in STEM},
  year      = {2020},
  date      = {2020-10-01},
  urldate   = {2020-10-01},
  url       = {https://drive.google.com/file/d/13v1dc9uCqxCpLpdEI34p-2zUshqO1ucL/view?usp=sharing},
  abstract  = {It is important to detect where exactly medical trainees are looking. There are various regions a trainee may focus on, therefore eye-tracking on trainees may improve training. I created a unity package to record eye movement during simulations.},
  keywords  = {2020, Alberto Melchor Maldonado, Salam Daher},
  pubstate  = {published},
  tppubtype = {conference}
}
Vivian Sanchez, Salam Daher
Automating Virtual Patients Responses for Medical Simulation Conference
Great Minds in STEM, 2020.
Abstract | Links | BibTeX | Tags: 2020, Salam Daher, sda, Vivian Sanchez
@conference{sanchez2020automating,
  author    = {Vivian Sanchez and Salam Daher},
  title     = {Automating Virtual Patients Responses for Medical Simulation},
  booktitle = {Great Minds in STEM},
  year      = {2020},
  date      = {2020-10-01},
  urldate   = {2020-10-01},
  url       = {https://drive.google.com/file/d/1wE61kpo1ljqZksb6jb4r4xfILhb7J0AA/view?usp=sharing},
  abstract  = {Voice assistants are not being used enough in healthcare. Health students train on simulators that requires members of faculty to control patient responses. This takes up resources and limits the students to certain training times. We created an Alexa skill for a stroke patient scenario, connected it to a 3D character, and explored the capabilities and limitations of the Amazon Alexa. Healthcare students can then interact with the Alexa patient without the faculty feeding the answers to the patient. By supplementing existing simulations with an automated way to respond while still providing controlled answers can allow more doctors and nurses to practice without waiting for an available instructor.},
  keywords  = {2020, Salam Daher, sda, Vivian Sanchez},
  pubstate  = {published},
  tppubtype = {conference}
}
Alberto Melchor Maldonado, Salam Daher
Tracking Eye Gaze in Unity3D for Training. Conference
Great Minds in STEM, 2020.
BibTeX | Tags: 2020, Alberto Melchor Maldonado, Eye gaze, Eye-tracking, Salam Daher, training, Unity3D
@conference{melchor2020tracking2,
  title     = {Tracking Eye Gaze in {Unity3D} for Training},
  author    = {Maldonado, Alberto Melchor and Daher, Salam},
  year      = {2020},
  date      = {2020-10-01},
  urldate   = {2020-10-01},
  booktitle = {Great Minds in STEM},
  pages     = {5},
  keywords  = {2020, Alberto Melchor Maldonado, Eye gaze, Eye-tracking, Salam Daher, training, Unity3D},
  pubstate  = {published},
  tppubtype = {conference}
}
Salam Daher, Laura Gonzalez
Collaborations with Nursing Experts for Virtual Agent Research Conference
ACM Intelligent Virtual Agent, Glasgow, UK Virtual, 2020.
Links | BibTeX | Tags: 2020, Laura Gonzalez, Salam Daher
@conference{daher2020collaborations,
  title     = {Collaborations with Nursing Experts for Virtual Agent Research},
  author    = {Daher, Salam and Gonzalez, Laura},
  note      = {PDF available upon request},
  year      = {2020},
  date      = {2020-10-01},
  booktitle = {ACM Intelligent Virtual Agent},
  address   = {Glasgow, UK Virtual},
  keywords  = {2020, Laura Gonzalez, Salam Daher},
  pubstate  = {published},
  tppubtype = {conference}
}
Salam Daher, Jason Hochreiter, Ryan Schubert, Laura Gonzalez, Juan Cendan, Mindi Anderson, Desiree Diaz, Gregory Welch
Physical-Virtual Patient: A new patient simulator Journal Article
In: Society of Simulation in Healthcare Journal, vol. 15, iss. 2, pp. 115-121, 2020.
Abstract | Links | BibTeX | Tags: 2020, Desiree Diaz, development, evaluation, Gregory F Welch, Jason Hochreiter, Juan Cendan, Laura Gonzalez, Mindi Anderson, pediatric patient simulation, physical-virtual patient simulator, pilot study, pvp, Ryan Schubert, Salam Daher, sepsis
@article{daher2020physical,
  title     = {Physical-Virtual Patient: A new patient simulator},
  author    = {Daher, Salam and Hochreiter, Jason and Schubert, Ryan and Gonzalez, Laura and Cendan, Juan and Anderson, Mindi and Diaz, Desiree and Welch, Gregory},
  url       = {https://journals.lww.com/simulationinhealthcare/fulltext/2020/04000/the_physical_virtual_patient_simulator__a_physical.9.aspx},
  doi       = {10.1097/SIH.0000000000000409},
  year      = {2020},
  date      = {2020-04-01},
  urldate   = {2020-04-01},
  journal   = {Society of Simulation in Healthcare Journal},
  volume    = {15},
  issue     = {2},
  pages     = {115--121},
  abstract  = {Introduction: We introduce a new type of patient simulator referred to as the Physical-Virtual Patient Simulator (PVPS). The PVPS combines the tangible characteristics of a human-shaped physical form with the flexibility and richness of a virtual patient. The PVPS can exhibit a range of multisensory cues, including visual cues (eg, capillary refill, facial expressions, appearance changes), auditory cues (eg, verbal responses, heart sounds), and tactile cues (eg, localized temperature, pulse).
Methods: We describe the implementation of the technology, technical testing with healthcare experts, and an institutional review board–approved pilot experiment involving 22 nurse practitioner students interacting with a simulated child in 2 scenarios: sepsis and child abuse. The nurse practitioners were asked qualitative questions about ease of use and the cues they noticed.
Results: Participants found it easy to interact with the PVPS and had mixed but encouraging responses regarding realism. In the sepsis scenario, participants reported the following cues leading to their diagnoses: temperature, voice, mottled skin, attitude and facial expressions, breathing and cough, vitals and oxygen saturation, and appearance of the mouth and tongue. For the child abuse scenario, they reported the skin appearance on the arms and abdomen, perceived attitude, facial expressions, and inconsistent stories.
Conclusions: We are encouraged by the initial results and user feedback regarding the perceived realism of visual (eg, mottling), audio (eg, breathing sounds), and tactile (eg, temperature) cues displayed by the PVPS, and ease of interaction with the simulator.(Sim Healthcare 15:115–121, 2020)},
  keywords  = {2020, Desiree Diaz, development, evaluation, Gregory F Welch, Jason Hochreiter, Juan Cendan, Laura Gonzalez, Mindi Anderson, pediatric patient simulation, physical-virtual patient simulator, pilot study, pvp, Ryan Schubert, Salam Daher, sepsis},
  pubstate  = {published},
  tppubtype = {article}
}
Methods: We describe the implementation of the technology, technical testing with healthcare experts, and an institutional review board–approved pilot experiment involving 22 nurse practitioner students interacting with a simulated child in 2 scenarios: sepsis and child abuse. The nurse practitioners were asked qualitative questions about ease of use and the cues they noticed.
Results: Participants found it easy to interact with the PVPS and had mixed but encouraging responses regarding realism. In the sepsis scenario, participants reported the following cues leading to their diagnoses: temperature, voice, mottled skin, attitude and facial expressions, breathing and cough, vitals and oxygen saturation, and appearance of the mouth and tongue. For the child abuse scenario, they reported the skin appearance on the arms and abdomen, perceived attitude, facial expressions, and inconsistent stories.
Conclusions: We are encouraged by the initial results and user feedback regarding the perceived realism of visual (eg, mottling), audio (eg, breathing sounds), and tactile (eg, temperature) cues displayed by the PVPS, and ease of interaction with the simulator.(Sim Healthcare 15:115–121, 2020)