@inproceedings{0be046f779fb4797bb10252931a15e57,
title = "I know your next move: action decisions in dyadic pick and place tasks",
abstract = "Joint pick and place tasks occur in many interpersonal scenarios, such as when two people pick up and pass dishes. Previous studies have demonstrated that low-dimensional models can accurately capture the dynamics of pick and place motor behaviors in a controlled 2D environment. The current study models the dynamics of pick-up and pass decisions within a less restrictive, virtual reality-mediated 3D joint pick and place task. Findings indicate that reach-normalized distance measures between participants and objects/targets could accurately predict pick-up and pass decisions. Findings also reveal that participants took longer to pick up objects where division-of-labor boundaries were less obvious and tended to pass in locations maximizing the dyad's efficiency. This study supports the notion that individuals are more likely to engage in interpersonal behavior when a task goal is perceived as difficult or unattainable (i.e., not afforded). Implications of findings for human-artificial agent interactions are discussed.",
keywords = "affordances, joint action, pick and place tasks, decision making, virtual reality",
author = "Babajanyan, Diana and Patil, Gaurav and Lamb, Maurice and Kallen, {Rachel W.} and Richardson, {Michael J.}",
note = "Annual Meeting of the Cognitive Science Society (44th: 2022), CogSci 2022; Conference date: 27-07-2022 through 30-07-2022",
year = "2022",
language = "English",
series = "Proceedings of the Annual Meeting of the Cognitive Science Society",
publisher = "Cognitive Science Society",
pages = "563--570",
editor = "Jennifer Culbertson and Andrew Perfors and Hugh Rabagliati and Veronica Ramenzoni",
booktitle = "CogSci 2022",
}