{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,17]],"date-time":"2026-03-17T17:04:33Z","timestamp":1773767073871,"version":"3.50.1"},"publisher-location":"New York, NY, USA","reference-count":74,"publisher":"ACM","license":[{"start":{"date-parts":[[2021,5,6]],"date-time":"2021-05-06T00:00:00Z","timestamp":1620259200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2021,5,6]]},"DOI":"10.1145\/3411764.3445582","type":"proceedings-article","created":{"date-parts":[[2021,5,8]],"date-time":"2021-05-08T05:53:19Z","timestamp":1620453199000},"page":"1-12","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":36,"title":["Pose-on-the-Go: Approximating User Pose with Smartphone Sensor Fusion and Inverse Kinematics"],"prefix":"10.1145","author":[{"given":"Karan","family":"Ahuja","sequence":"first","affiliation":[{"name":"Human-Computer Interaction Institute Carnegie Mellon University, United States"}]},{"given":"Sven","family":"Mayer","sequence":"additional","affiliation":[{"name":"Human-Computer Interaction Institute Carnegie Mellon University, United States"}]},{"given":"Mayank","family":"Goel","sequence":"additional","affiliation":[{"name":"School of Computer Science Carnegie Mellon University, United States"}]},{"given":"Chris","family":"Harrison","sequence":"additional","affiliation":[{"name":"Human-Computer Interaction Institute Carnegie Mellon University, United States"}]}],"member":"320","published-online":{"date-parts":[[2021,5,7]]},"reference":[{"key":"e_1_3_2_2_1_1","doi-asserted-by":"publisher","DOI":"10.1145\/3385959.3418452"},{"key":"e_1_3_2_2_2_1","doi-asserted-by":"publisher","DOI":"10.1145\/3332165.3347889"},{"key":"e_1_3_2_2_3_1","doi-asserted-by":"publisher","DOI":"10.1145\/3379337.3415588"},{"key":"e_1_3_2_2_4_1","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2018.00762"},{"key":"e_1_3_2_2_5_1","unstructured":"ALT LLC. 2020. Antilatency. Retrieved 2020 from https:\/\/antilatency.com\/  ALT LLC. 2020. Antilatency. Retrieved 2020 from https:\/\/antilatency.com\/"},{"key":"e_1_3_2_2_6_1","doi-asserted-by":"publisher","DOI":"10.1145\/3274247.3274511"},{"key":"e_1_3_2_2_7_1","unstructured":"Apple Inc.2020. Apple Developer - ARFaceAnchor. Retrieved 2020 from https:\/\/developer.apple.com\/documentation\/arkit\/arfaceanchor  Apple Inc.2020. Apple Developer - ARFaceAnchor. Retrieved 2020 from https:\/\/developer.apple.com\/documentation\/arkit\/arfaceanchor"},{"key":"e_1_3_2_2_8_1","unstructured":"Apple Inc.2020. Apple Developer - CoreMotion Activity. Retrieved 2020 from https:\/\/developer.apple.com\/documentation\/coremotion\/cmmotionactivity  Apple Inc.2020. Apple Developer - CoreMotion Activity. Retrieved 2020 from https:\/\/developer.apple.com\/documentation\/coremotion\/cmmotionactivity"},{"key":"e_1_3_2_2_9_1","unstructured":"Apple Inc.2020. Apple Developer - CoreMotion Pedometer. Retrieved 2020 from https:\/\/developer.apple.com\/documentation\/coremotion\/cmpedometerdata  Apple Inc.2020. Apple Developer - CoreMotion Pedometer. Retrieved 2020 from https:\/\/developer.apple.com\/documentation\/coremotion\/cmpedometerdata"},{"key":"e_1_3_2_2_10_1","unstructured":"Apple Inc.2020. Support - Animoji. Retrieved 2020 from https:\/\/support.apple.com\/en-au\/HT208190  Apple Inc.2020. Support - Animoji. Retrieved 2020 from https:\/\/support.apple.com\/en-au\/HT208190"},{"key":"e_1_3_2_2_11_1","doi-asserted-by":"publisher","DOI":"10.1145\/3334480.3382962"},{"key":"e_1_3_2_2_12_1","doi-asserted-by":"publisher","DOI":"10.1109\/FG.2018.00019"},{"key":"e_1_3_2_2_13_1","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-540-24646-6_1"},{"key":"e_1_3_2_2_14_1","doi-asserted-by":"publisher","DOI":"10.1145\/223904.223935"},{"key":"e_1_3_2_2_15_1","doi-asserted-by":"publisher","DOI":"10.1145\/1031607.1031666"},{"key":"e_1_3_2_2_16_1","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2017.143"},{"key":"e_1_3_2_2_17_1","doi-asserted-by":"publisher","DOI":"10.1145\/2858036.2858125"},{"key":"e_1_3_2_2_18_1","doi-asserted-by":"publisher","DOI":"10.3390\/s20041074"},{"key":"e_1_3_2_2_19_1","doi-asserted-by":"publisher","DOI":"10.1145\/2628363.2628402"},{"key":"e_1_3_2_2_20_1","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP.2017.7952187"},{"key":"e_1_3_2_2_21_1","unstructured":"Muybridge Eadweard. 1878. The Horse in Motion.  Muybridge Eadweard. 1878. The Horse in Motion."},{"key":"e_1_3_2_2_22_1","unstructured":"Facebook Technologies LLC. 2020. Oculus Quest. Retrieved 2020 from https:\/\/www.oculus.com\/quest  Facebook Technologies LLC. 2020. Oculus Quest. Retrieved 2020 from https:\/\/www.oculus.com\/quest"},{"key":"e_1_3_2_2_23_1","doi-asserted-by":"publisher","DOI":"10.1007\/s11042-015-2944-3"},{"key":"e_1_3_2_2_24_1","doi-asserted-by":"publisher","DOI":"10.1109\/ISWC.2000.888482"},{"key":"e_1_3_2_2_25_1","doi-asserted-by":"publisher","DOI":"10.1145\/2019406.2019424"},{"key":"e_1_3_2_2_26_1","volume-title":"Proxemics: The study of man\u2019s spatial relations.","author":"Hall Edward\u00a0Twitchell","year":"1962","unstructured":"Edward\u00a0Twitchell Hall . 1962 . Proxemics: The study of man\u2019s spatial relations. Edward\u00a0Twitchell Hall. 1962. Proxemics: The study of man\u2019s spatial relations."},{"key":"e_1_3_2_2_27_1","doi-asserted-by":"publisher","DOI":"10.1145\/2047196.2047255"},{"key":"e_1_3_2_2_28_1","doi-asserted-by":"publisher","DOI":"10.1145\/2047196.2047279"},{"key":"e_1_3_2_2_29_1","doi-asserted-by":"crossref","unstructured":"Gregor Hofer Junichi Yamagishi and Hiroshi Shimodaira. 2008. Speech-driven lip motion generation with a trajectory HMM. (2008).  Gregor Hofer Junichi Yamagishi and Hiroshi Shimodaira. 2008. Speech-driven lip motion generation with a trajectory HMM. (2008).","DOI":"10.21437\/Interspeech.2008-591"},{"key":"e_1_3_2_2_30_1","unstructured":"Notch\u00a0Interfaces Inc.2020. Notch Interfaces. Retrieved 2020 from https:\/\/wearnotch.com\/  Notch\u00a0Interfaces Inc.2020. Notch Interfaces. Retrieved 2020 from https:\/\/wearnotch.com\/"},{"key":"e_1_3_2_2_31_1","unstructured":"Intel Corporation. 2020. RealSense. Retrieved 2020 from https:\/\/www.intelrealsense.com\/  Intel Corporation. 2020. RealSense. Retrieved 2020 from https:\/\/www.intelrealsense.com\/"},{"key":"e_1_3_2_2_32_1","doi-asserted-by":"publisher","DOI":"10.1145\/985692.985693"},{"key":"e_1_3_2_2_33_1","doi-asserted-by":"publisher","DOI":"10.1145\/3161199"},{"key":"e_1_3_2_2_34_1","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2014.241"},{"key":"e_1_3_2_2_35_1","doi-asserted-by":"publisher","DOI":"10.1145\/2380116.2380139"},{"key":"e_1_3_2_2_36_1","doi-asserted-by":"publisher","DOI":"10.1145\/3379337.3415889"},{"key":"e_1_3_2_2_37_1","doi-asserted-by":"publisher","DOI":"10.1145\/3301275.3302295"},{"key":"e_1_3_2_2_38_1","doi-asserted-by":"publisher","DOI":"10.1109\/ICRA.2013.6631398"},{"key":"e_1_3_2_2_39_1","doi-asserted-by":"publisher","DOI":"10.1145\/3132272.3134130"},{"key":"e_1_3_2_2_40_1","unstructured":"Meta Motion. 2018. Gypsy Motion Capture System. Retrieved 2021 from http:\/\/metamotion.com\/gypsy\/gypsy-motion-capture-system.htm  Meta Motion. 2018. Gypsy Motion Capture System. Retrieved 2021 from http:\/\/metamotion.com\/gypsy\/gypsy-motion-capture-system.htm"},{"key":"e_1_3_2_2_41_1","unstructured":"Microsoft Corporation. 2010. Microsoft Kinect. Retrieved 2021 from https:\/\/en.wikipedia.org\/wiki\/Kinect  Microsoft Corporation. 2010. Microsoft Kinect. Retrieved 2021 from https:\/\/en.wikipedia.org\/wiki\/Kinect"},{"key":"e_1_3_2_2_42_1","unstructured":"Microsoft Corporation. 2010. Microsoft Kinect Games. Retrieved 2021 from https:\/\/en.wikipedia.org\/wiki\/Category:Kinect_games  Microsoft Corporation. 2010. Microsoft Kinect Games. Retrieved 2021 from https:\/\/en.wikipedia.org\/wiki\/Category:Kinect_games"},{"key":"e_1_3_2_2_43_1","unstructured":"Microsoft Corporation. 2019. HoloLens. Retrieved 2021 from https:\/\/www.microsoft.com\/en-us\/hololens  Microsoft Corporation. 2019. HoloLens. Retrieved 2021 from https:\/\/www.microsoft.com\/en-us\/hololens"},{"key":"e_1_3_2_2_44_1","doi-asserted-by":"publisher","DOI":"10.1109\/ICHR.2004.1442670"},{"key":"e_1_3_2_2_45_1","unstructured":"NaturalPoint Inc.2020. OptiTrack. Retrieved 2020 from http:\/\/optitrack.com  NaturalPoint Inc.2020. OptiTrack. Retrieved 2020 from http:\/\/optitrack.com"},{"key":"e_1_3_2_2_46_1","doi-asserted-by":"publisher","DOI":"10.2312\/egve.20151311"},{"key":"e_1_3_2_2_47_1","unstructured":"Northern Digital Inc. 2020. trakSTAR. Retrieved 2020 from https:\/\/www.ndigital.com\/msci\/products\/drivebay-trakstar\/  Northern Digital Inc. 2020. trakSTAR. Retrieved 2020 from https:\/\/www.ndigital.com\/msci\/products\/drivebay-trakstar\/"},{"key":"e_1_3_2_2_48_1","unstructured":"OpenNI. 2020. OpenNI. Retrieved 2020 from https:\/\/structure.io\/openni  OpenNI. 2020. OpenNI. Retrieved 2020 from https:\/\/structure.io\/openni"},{"key":"e_1_3_2_2_49_1","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-01264-9_17"},{"key":"e_1_3_2_2_50_1","doi-asserted-by":"publisher","DOI":"10.1145\/3281505.3281529"},{"key":"e_1_3_2_2_51_1","unstructured":"PhaseSpace Inc.2020. PhaseSpace. Retrieved 2020 from https:\/\/phasespace.com\/  PhaseSpace Inc.2020. PhaseSpace. Retrieved 2020 from https:\/\/phasespace.com\/"},{"key":"e_1_3_2_2_52_1","doi-asserted-by":"publisher","DOI":"10.1145\/3173574.3173620"},{"key":"e_1_3_2_2_53_1","unstructured":"Polhemus. 2020. Polhemus. Retrieved 2020 from https:\/\/polhemus.com\/case-study\/detail\/polhemus-motion-capture-system-is-used-to-measure-real-time-motion-analysis  Polhemus. 2020. Polhemus. Retrieved 2020 from https:\/\/polhemus.com\/case-study\/detail\/polhemus-motion-capture-system-is-used-to-measure-real-time-motion-analysis"},{"key":"e_1_3_2_2_54_1","doi-asserted-by":"publisher","DOI":"10.1145\/2980179.2980235"},{"key":"e_1_3_2_2_55_1","doi-asserted-by":"publisher","DOI":"10.1145\/3304109.3306236"},{"key":"e_1_3_2_2_56_1","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-319-16178-5_25"},{"key":"e_1_3_2_2_57_1","unstructured":"Root Motion. 2020. FINAL IK - VRIK Solver Locomotion. Retrieved 2020 from http:\/\/www.root-motion.com\/finalikdox\/html\/page16.html  Root Motion. 2020. FINAL IK - VRIK Solver Locomotion. Retrieved 2020 from http:\/\/www.root-motion.com\/finalikdox\/html\/page16.html"},{"key":"e_1_3_2_2_58_1","unstructured":"Root Motion. 2020. Root Motion. Retrieved 2020 from http:\/\/root-motion.com\/  Root Motion. 2020. Root Motion. Retrieved 2020 from http:\/\/root-motion.com\/"},{"key":"e_1_3_2_2_59_1","doi-asserted-by":"publisher","DOI":"10.1145\/2906388.2906407"},{"key":"e_1_3_2_2_60_1","doi-asserted-by":"publisher","DOI":"10.1145\/1964921.1964926"},{"key":"e_1_3_2_2_61_1","unstructured":"Snap Inc.2020. Snapchat Lenses. Retrieved 2020 from https:\/\/lensstudio.snapchat.com\/lenses\/  Snap Inc.2020. Snapchat Lenses. Retrieved 2020 from https:\/\/lensstudio.snapchat.com\/lenses\/"},{"key":"e_1_3_2_2_62_1","doi-asserted-by":"publisher","DOI":"10.1145\/1476589.1476686"},{"key":"e_1_3_2_2_63_1","doi-asserted-by":"publisher","DOI":"10.1145\/1966394.1966397"},{"key":"e_1_3_2_2_64_1","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2019.00782"},{"key":"e_1_3_2_2_65_1","unstructured":"Unity Technologies. 2020. Unity. Retrieved 2020 from https:\/\/unity.com\/  Unity Technologies. 2020. Unity. Retrieved 2020 from https:\/\/unity.com\/"},{"key":"e_1_3_2_2_66_1","unstructured":"Verhaert. 2020. Verhaert. Retrieved 2020 from https:\/\/verhaert.com\/  Verhaert. 2020. Verhaert. Retrieved 2020 from https:\/\/verhaert.com\/"},{"key":"e_1_3_2_2_67_1","unstructured":"Vicon Motion Systems Ltd. 2020. Vicon. Retrieved 2020 from https:\/\/vicon.com\/  Vicon Motion Systems Ltd. 2020. Vicon. Retrieved 2020 from https:\/\/vicon.com\/"},{"key":"e_1_3_2_2_68_1","unstructured":"Vive. 2020. HTC VIVE. Retrieved 2020 from https:\/\/www.vive.com\/  Vive. 2020. HTC VIVE. Retrieved 2020 from https:\/\/www.vive.com\/"},{"key":"e_1_3_2_2_69_1","doi-asserted-by":"publisher","DOI":"10.1145\/1276377.1276421"},{"key":"e_1_3_2_2_70_1","doi-asserted-by":"publisher","DOI":"10.1145\/2817721.2817737"},{"key":"e_1_3_2_2_71_1","unstructured":"Xsens. 2020. Motion Capture. Retrieved 2020 from https:\/\/www.xsens.com\/motion-capture  Xsens. 2020. Motion Capture. Retrieved 2020 from https:\/\/www.xsens.com\/motion-capture"},{"key":"e_1_3_2_2_72_1","doi-asserted-by":"publisher","DOI":"10.1109\/TVCG.2019.2898650"},{"key":"e_1_3_2_2_73_1","doi-asserted-by":"publisher","DOI":"10.1002\/rob.20050"},{"key":"e_1_3_2_2_74_1","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2018.00768"}],"event":{"name":"CHI '21: CHI Conference on Human Factors in Computing Systems","location":"Yokohama Japan","acronym":"CHI '21","sponsor":["SIGCHI ACM Special Interest Group on Computer-Human Interaction"]},"container-title":["Proceedings of the 2021 CHI Conference on Human Factors in Computing Systems"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3411764.3445582","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3411764.3445582","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,17]],"date-time":"2025-06-17T21:28:37Z","timestamp":1750195717000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3411764.3445582"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2021,5,6]]},"references-count":74,"alternative-id":["10.1145\/3411764.3445582","10.1145\/3411764"],"URL":"https:\/\/doi.org\/10.1145\/3411764.3445582","relation":{},"subject":[],"published":{"date-parts":[[2021,5,6]]},"assertion":[{"value":"2021-05-07","order":2,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}