User-State Sensing for Virtual Health Agents and TeleHealth Applications
@inproceedings{gratch_user-state_2013,
	address = {San Diego, CA},
	title = {User-{State} {Sensing} for {Virtual} {Health} {Agents} and {TeleHealth} {Applications}},
	url = {http://ict.usc.edu/pubs/User-State%20Sensing%20for%20Virtual%20Health%20Agents%20and%20TeleHealth%20Applications.pdf},
	abstract = {Nonverbal behaviors play a crucial role in shaping outcomes in face-to-face clinical interactions. Experienced clinicians use nonverbals to foster rapport and “read” their clients to inform diagnoses. The rise of telemedicine and virtual health agents creates new opportunities, but it also strips away much of this nonverbal channel. Recent advances in low-cost computer vision and sensing technologies have the potential to address this challenge by learning to recognize nonverbal cues from large datasets of clinical interactions. These techniques can enhance both telemedicine and the emerging technology of virtual health agents. This article describes our current research in addressing these challenges in the domain of PTSD and depression screening for U.S. Veterans. We describe our general approach and report on our initial contribution: the creation of a large dataset of clinical interview data that facilitates the training of user-state sensing technology.},
	booktitle = {Medicine {Meets} {Virtual} {Reality}},
	author = {Gratch, Jonathan and Morency, Louis-Philippe and Scherer, Stefan and Stratou, Giota and Boberg, Jill and Koenig, Sebastian and Adamson, Todd and Rizzo, Albert},
	month = feb,
	year = {2013},
	keywords = {MedVR, UARC, Virtual Humans}
}