{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,12,16]],"date-time":"2025-12-16T18:45:53Z","timestamp":1765910753298,"version":"3.48.0"},"reference-count":41,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"5","license":[{"start":{"date-parts":[[2025,10,1]],"date-time":"2025-10-01T00:00:00Z","timestamp":1759276800000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2025,10,1]],"date-time":"2025-10-01T00:00:00Z","timestamp":1759276800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,10,1]],"date-time":"2025-10-01T00:00:00Z","timestamp":1759276800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"name":"National Natural Science Fund for Key International Collaboration","award":["62120106005"],"award-info":[{"award-number":["62120106005"]}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["62273054"],"award-info":[{"award-number":["62273054"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Cogn. Dev. Syst."],"published-print":{"date-parts":[[2025,10]]},"DOI":"10.1109\/tcds.2025.3539665","type":"journal-article","created":{"date-parts":[[2025,2,7]],"date-time":"2025-02-07T13:46:27Z","timestamp":1738935987000},"page":"1050-1060","source":"Crossref","is-referenced-by-count":1,"title":["Where To Learn: Embodied Perception Learning Planned by Vision-Language Models"],"prefix":"10.1109","volume":"17","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-8241-8691","authenticated-orcid":false,"given":"Juan","family":"Wang","sequence":"first","affiliation":[{"name":"Department of Computer Science and Technology and Beijing National Research Center for Information Science and Technology, Tsinghua University, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-9816-0103","authenticated-orcid":false,"given":"Di","family":"Guo","sequence":"additional","affiliation":[{"name":"School of Artificial Intelligence, Beijing University of Posts and Telecommunications, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-4042-6044","authenticated-orcid":false,"given":"Huaping","family":"Liu","sequence":"additional","affiliation":[{"name":"Department of Computer Science and Technology and Beijing National Research Center for Information Science and Technology, Tsinghua University, Beijing, China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-58539-6_19"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1109\/ICRA48891.2023.10160951"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1109\/TCDS.2024.3377445"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1109\/LRA.2022.3188901"},{"key":"ref5","first-page":"13086","article-title":"SEAL: Self-supervised embodied active learning using exploration and 3D consistency","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"34","author":"Chaplot","year":"2021"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1109\/LRA.2022.3157028"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i3.16338"},{"article-title":"Embodied learning for lifelong visual perception","year":"2021","author":"Nilsson","key":"ref8"},{"key":"ref9","first-page":"4247","article-title":"Object goal navigation using goal-oriented semantic exploration","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"33","author":"Chaplot","year":"2020"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-031-72992-8_5"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.5244\/c.35.174"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.1109\/SII58957.2024.10417237"},{"article-title":"ALP: Action-aware embodied learning for perception","year":"2023","author":"Liang","key":"ref13"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2023.3323040"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1109\/TCDS.2020.2993855"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1109\/TCDS.2021.3075862"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1109\/TCDS.2018.2868425"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/TCDS.2023.3284070"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1108\/RIA-01-2024-0011"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1108\/RIA-11-2023-0163"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1108\/RIA-08-2023-0107"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1108\/RIA-11-2023-0173"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1017\/S0263574722001205"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1109\/LRA.2022.3145964"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.15607\/RSS.2022.XVIII.032"},{"article-title":"Embodied task planning with large language models","year":"2023","author":"Wu","key":"ref26"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.1109\/iros58592.2024.10802322"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV51070.2023.00280"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1109\/tpami.2025.3554559"},{"article-title":"Ghost in the Minecraft: Generally capable agents for open-world environments via large language models with text-based knowledge and memory","year":"2023","author":"Zhu","key":"ref30"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1109\/IROS55552.2023.10342512"},{"key":"ref32","first-page":"2683","article-title":"Navigation with large language models: Semantic guesswork as a heuristic for planning","volume-title":"Proc. Conf. Robot Learn.","author":"Shah","year":"2023"},{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.1142\/s2972335324500042"},{"article-title":"VIMA: General robot manipulation with multimodal prompts","year":"2022","author":"Jiang","key":"ref34"},{"article-title":"PaLM-E: An embodied multimodal language model","year":"2023","author":"Driess","key":"ref35"},{"key":"ref36","first-page":"2165","article-title":"RT-2: Vision-language-action models transfer web knowledge to robotic control","volume-title":"Proc. Conf. Robot Learn.","author":"Zitkovich","year":"2023"},{"article-title":"Towards end-to-end embodied decision making via multi-modal large language model: Explorations with gpt4-vision and beyond","year":"2023","author":"Chen","key":"ref37"},{"article-title":"RePLan: Robotic replanning with perception and language models","year":"2024","author":"Skreta","key":"ref38"},{"article-title":"Look before you leap: Unveiling the power of GPT-4V in robotic vision-language planning","year":"2023","author":"Hu","key":"ref39"},{"article-title":"Selective visual representations improve convergence and generalization for embodied AI","year":"2023","author":"Eftekhar","key":"ref40"},{"key":"ref41","first-page":"8748","article-title":"Learning transferable visual models from natural language supervision","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Radford","year":"2021"}],"container-title":["IEEE Transactions on Cognitive and Developmental Systems"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/7274989\/11197738\/10877883.pdf?arnumber=10877883","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,12,16]],"date-time":"2025-12-16T18:33:16Z","timestamp":1765909996000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10877883\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,10]]},"references-count":41,"journal-issue":{"issue":"5"},"URL":"https:\/\/doi.org\/10.1109\/tcds.2025.3539665","relation":{},"ISSN":["2379-8920","2379-8939"],"issn-type":[{"type":"print","value":"2379-8920"},{"type":"electronic","value":"2379-8939"}],"subject":[],"published":{"date-parts":[[2025,10]]}}}