Detection and Computational Analysis of Psychological Signals Using a Virtual Human Interviewing Agent (bibtex)
@inproceedings{rizzo_detection_2014,
	address    = {Gothenburg, Sweden},
	title      = {Detection and Computational Analysis of Psychological Signals Using a Virtual Human Interviewing Agent},
	shorttitle = {Detection of Psychological Signals},
	url        = {http://ict.usc.edu/pubs/Detection%20and%20Computational%20Analysis%20of%20Psychological%20Signals%20Using%20a%20Virtual%20Human%20Interviewing%20Agent.pdf},
	abstract   = {It has long been recognized that facial expressions, body posture/gestures and vocal parameters play an important role in human communication and the implicit signalling of emotion. Recent advances in low cost computer vision and behavioral sensing technologies can now be applied to the process of making meaningful inferences as to user state when a person interacts with a computational device. Effective use of this additive information could serve to promote human interaction with virtual human (VH) agents that may enhance diagnostic assessment. This paper will focus on our current research in these areas within the DARPA-funded ``Detection and Computational Analysis of Psychological Signals'' project, with specific attention to the SimSensei application use case. SimSensei is a virtual human interaction platform that is able to sense and interpret real-time audiovisual behavioral signals from users interacting with the system. It is specifically designed for health care support and leverages years of virtual human research and development at USC-ICT. The platform enables an engaging face-to-face interaction where the virtual human automatically reacts to the state and inferred intent of the user through analysis of behavioral signals gleaned from facial expressions, body gestures and vocal parameters. Akin to how non-verbal behavioral signals have an impact on human to human interaction and communication, SimSensei aims to capture and infer from user non-verbal communication to improve engagement between a VH and a user. The system can also quantify and interpret sensed behavioral signals.},
	booktitle  = {Proceedings of {ICDVRAT} 2014},
	publisher  = {International Journal of Disability and Human Development},
	author     = {Rizzo, Albert and Scherer, Stefan and DeVault, David and Gratch, Jonathan and Artstein, Ron and Hartholt, Arno and Lucas, Gale and Marsella, Stacy and Morbini, Fabrizio and Nazarian, Angela and Stratou, Giota and Traum, David and Wood, Rachel and Boberg, Jill and Morency, Louis-Philippe},
	month      = dec,
	year       = {2014},
	keywords   = {MedVR, Social Simulation, UARC, Virtual Humans},
}
Powered by bibtexbrowser