{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,2,28]],"date-time":"2026-02-28T18:20:05Z","timestamp":1772302805306,"version":"3.50.1"},"reference-count":54,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"6","license":[{"start":{"date-parts":[[2025,6,1]],"date-time":"2025-06-01T00:00:00Z","timestamp":1748736000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2025,6,1]],"date-time":"2025-06-01T00:00:00Z","timestamp":1748736000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,6,1]],"date-time":"2025-06-01T00:00:00Z","timestamp":1748736000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"name":"National Key Research and Development Project of China","award":["2022ZD0160102"],"award-info":[{"award-number":["2022ZD0160102"]}]},{"name":"National Key Research and Development Project of China","award":["2021ZD0110704"],"award-info":[{"award-number":["2021ZD0110704"]}]},{"name":"Shanghai Artificial Intelligence Laboratory, XPLORER PRIZE grants"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Robot. Autom. Lett."],"published-print":{"date-parts":[[2025,6]]},"DOI":"10.1109\/lra.2025.3560871","type":"journal-article","created":{"date-parts":[[2025,4,14]],"date-time":"2025-04-14T17:41:23Z","timestamp":1744652483000},"page":"5625-5632","source":"Crossref","is-referenced-by-count":2,"title":["FoAR: Force-Aware Reactive Policy for Contact-Rich Robotic Manipulation"],"prefix":"10.1109","volume":"10","author":[{"ORCID":"https:\/\/orcid.org\/0009-0009-1520-5774","authenticated-orcid":false,"given":"Zihao","family":"He","sequence":"first","affiliation":[{"name":"University of Michigan-Shanghai Jiao Tong University Joint Institute, Shanghai Jiao Tong University, Shanghai, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-6309-1160","authenticated-orcid":false,"given":"Hongjie","family":"Fang","sequence":"additional","affiliation":[{"name":"School of Computer Science, Shanghai Jiao Tong University, Shanghai, China"}]},{"ORCID":"https:\/\/orcid.org\/0009-0008-7431-2898","authenticated-orcid":false,"given":"Jingjing","family":"Chen","sequence":"additional","affiliation":[{"name":"School of Computer Science, Shanghai Jiao Tong University, Shanghai, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-0758-0293","authenticated-orcid":false,"given":"Hao-Shu","family":"Fang","sequence":"additional","affiliation":[{"name":"School of Computer Science, Shanghai Jiao Tong University, Shanghai, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-1533-8576","authenticated-orcid":false,"given":"Cewu","family":"Lu","sequence":"additional","affiliation":[{"name":"School of Artificial Intelligence, Shanghai Jiao Tong University, Shanghai, China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1016\/j.robot.2022.104224"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.15607\/RSS.2023.XIX.041"},{"key":"ref3","first-page":"4844","article-title":"MimicTouch: Leveraging multi-modal human tactile demonstrations for contact-rich manipulation","volume-title":"Proc. Conf. Robot Learn.","author":"Yu","year":"2024"},{"key":"ref4","article-title":"Adaptive compliance policy: Learning approximate compliance for diffusion guided control","author":"Hou","year":"2024"},{"key":"ref5","first-page":"947","article-title":"ManiWAV: Learning robot manipulation from in-the-wild audio-visual data","volume-title":"Proc. Conf. Robot Learn.","author":"Liu","year":"2024"},{"key":"ref6","article-title":"Vegetable peeling: A case study in constrained dexterous manipulation","author":"Chen","year":"2024"},{"key":"ref7","article-title":"ForceMimic: Force-centric imitation learning with force-motion capture system for contact-rich manipulation","author":"Liu","year":"2024"},{"key":"ref8","first-page":"726","article-title":"Transporter networks: Rearranging the visual world for robotic manipulation","volume-title":"Proc. Conf. Robot Learn.","author":"Zeng","year":"2020"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.15607\/RSS.2023.XIX.025"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.15607\/RSS.2023.XIX.026"},{"key":"ref11","first-page":"6892","article-title":"Open X-embodiment: Robotic learning datasets and RT-X models","volume-title":"Proc. IEEE Int. Conf. Robot. Autom.","author":"Collaboration","year":"2024"},{"key":"ref12","article-title":"OpenVLA: An open-source vision-language-action model","author":"Kim","year":"2024"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.15607\/rss.2024.xx.090"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1109\/IROS58592.2024.10801678"},{"key":"ref15","article-title":"CAGE: Causal attention enables data-efficient generalizable robotic manipulation","author":"Xia","year":"2024"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.15607\/RSS.2023.XIX.016"},{"key":"ref17","first-page":"2165","article-title":"RT-2: Vision-language-action models transfer web knowledge to robotic control","volume-title":"Proc. Conf. Robot Learn.","author":"Zitkovich","year":"2023"},{"key":"ref18","first-page":"340","article-title":"Play to the score: Stage-guided dynamic multi-sensory fusion for robotic manipulation","volume-title":"Proc. Conf. Robot Learn.","author":"Feng","year":"2024"},{"key":"ref19","first-page":"1368","article-title":"See, hear, and feel: Smart sensory fusion for robotic manipulation","volume-title":"Proc. Conf. Robot Learn.","author":"Li","year":"2022"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1109\/ICRA57147.2024.10611305"},{"key":"ref21","first-page":"2557","article-title":"3D-ViTac: Learning fine-grained manipulation with visuo-tactile sensing","volume-title":"Proc. CoRL","author":"Huang","year":"2024"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1109\/IROS58592.2024.10802778"},{"key":"ref23","first-page":"2026","article-title":"Visuo-tactile transformers for manipulation","volume-title":"Proc. Conf. Robot Learn.","author":"Chen","year":"2022"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1109\/ICRA.2019.8793485"},{"key":"ref25","article-title":"TacDiffusion: Force-domain diffusion policy for precise tactile manipulation","author":"Wu","year":"2024"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.1109\/tie.2025.3561816"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.1109\/LRA.2020.2977257"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.3390\/s17122762"},{"key":"ref29","first-page":"3766","article-title":"Transferable tactile transformers for representation learning across diverse sensors and tasks","volume-title":"Proc. Conf. Robot Learn.","author":"Zhao","year":"2024"},{"key":"ref30","first-page":"587","article-title":"ReSkin: Versatile, replaceable, lasting tactile skins","volume-title":"Proc. Conf. Robot Learn.","author":"Bhirangi","year":"2021"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1109\/LRA.2018.2812915"},{"key":"ref32","article-title":"AnySkin: Plug-and-play skin sensing for robotic touch","author":"Bhirangi","year":"2024"},{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.1109\/JSEN.2021.3123638"},{"key":"ref34","doi-asserted-by":"publisher","DOI":"10.1109\/LRA.2020.3010739"},{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.1109\/ICRA.2019.8794366"},{"key":"ref36","doi-asserted-by":"publisher","DOI":"10.1109\/ICRA.2015.7139504"},{"key":"ref37","article-title":"Learning diffusion policies from demonstrations for compliant contact-rich manipulation","author":"Aburub","year":"2024"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.1109\/IROS58592.2024.10801731"},{"key":"ref39","doi-asserted-by":"publisher","DOI":"10.1109\/AIM55361.2024.10637173"},{"key":"ref40","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2025.3541200"},{"key":"ref41","doi-asserted-by":"publisher","DOI":"10.1115\/1.3140702"},{"key":"ref42","doi-asserted-by":"publisher","DOI":"10.1109\/TSMC.1981.4308708"},{"key":"ref43","doi-asserted-by":"publisher","DOI":"10.1115\/1.3139652"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.1109\/ROBOT.1985.1087266"},{"key":"ref45","doi-asserted-by":"publisher","DOI":"10.1109\/IROS.2011.6095096"},{"key":"ref46","doi-asserted-by":"publisher","DOI":"10.1109\/ICRA.2015.7138994"},{"key":"ref47","doi-asserted-by":"publisher","DOI":"10.1109\/LRA.2025.3551637"},{"key":"ref48","doi-asserted-by":"publisher","DOI":"10.1109\/IROS40897.2019.8968204"},{"key":"ref49","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2019.00319"},{"key":"ref50","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.90"},{"key":"ref51","doi-asserted-by":"publisher","DOI":"10.48550\/ARXIV.1706.03762"},{"key":"ref52","first-page":"6840","article-title":"Denoising diffusion probabilistic models","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Ho","year":"2020"},{"key":"ref53","article-title":"Denoising diffusion implicit models","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Song","year":"2021"},{"key":"ref54","doi-asserted-by":"publisher","DOI":"10.1109\/ICRA57147.2024.10611615"}],"container-title":["IEEE Robotics and Automation Letters"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/7083369\/10969146\/10964857.pdf?arnumber=10964857","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,5,8]],"date-time":"2025-05-08T04:28:11Z","timestamp":1746678491000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10964857\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,6]]},"references-count":54,"journal-issue":{"issue":"6"},"URL":"https:\/\/doi.org\/10.1109\/lra.2025.3560871","relation":{},"ISSN":["2377-3766","2377-3774"],"issn-type":[{"value":"2377-3766","type":"electronic"},{"value":"2377-3774","type":"electronic"}],"subject":[],"published":{"date-parts":[[2025,6]]}}}