{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,2,21]],"date-time":"2025-02-21T01:21:44Z","timestamp":1740100904870,"version":"3.37.3"},"reference-count":21,"publisher":"IEEE","license":[{"start":{"date-parts":[[2022,7,18]],"date-time":"2022-07-18T00:00:00Z","timestamp":1658102400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2022,7,18]],"date-time":"2022-07-18T00:00:00Z","timestamp":1658102400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["61976057,62172101"],"award-info":[{"award-number":["61976057,62172101"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100003399","name":"Science and Technology Commission of Shanghai Municipality","doi-asserted-by":"publisher","award":["21511101000,20511101403"],"award-info":[{"award-number":["21511101000,20511101403"]}],"id":[{"id":"10.13039\/501100003399","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/100007219","name":"Shanghai Natural Science Foundation","doi-asserted-by":"publisher","award":["19ZR1417200"],"award-info":[{"award-number":["19ZR1417200"]}],"id":[{"id":"10.13039\/100007219","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2022,7,18]]},"DOI":"10.1109\/icme52920.2022.9859855","type":"proceedings-article","created":{"date-parts":[[2022,8,26]],"date-time":"2022-08-26T19:45:18Z","timestamp":1661543118000},"page":"1-6","source":"Crossref","is-referenced-by-count":3,"title":["STDNet: Spatio-Temporal Decomposed Network for Video Grounding"],"prefix":"10.1109","author":[{"given":"Yuanwu","family":"Xu","sequence":"first","affiliation":[{"name":"School of Computer Science, Fudan University,Shanghai Key Laboratory of Intelligent Information Processing"}]},{"given":"Yuejie","family":"Zhang","sequence":"additional","affiliation":[{"name":"School of Computer Science, Fudan University,Shanghai Key Laboratory of Intelligent Information Processing"}]},{"given":"Rui","family":"Feng","sequence":"additional","affiliation":[{"name":"School of Computer Science, Fudan University,Shanghai Key Laboratory of Intelligent Information Processing"}]},{"given":"Rui-Wei","family":"Zhao","sequence":"additional","affiliation":[{"name":"Academy for Engineering and Technology, Fudan University"}]},{"given":"Tao","family":"Zhang","sequence":"additional","affiliation":[{"name":"School of Information Management and Engineering, Shanghai University of Finance and Economics,Shanghai Key Laboratory of Financial Information Technology"}]},{"given":"Xuequan","family":"Lu","sequence":"additional","affiliation":[{"name":"School of Information Technology, Deakin University"}]},{"given":"Shang","family":"Gao","sequence":"additional","affiliation":[{"name":"School of Information Technology, Deakin University"}]}],"member":"263","reference":[{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR46437.2021.01108"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR42600.2020.01030"},{"key":"ref12","first-page":"568","article-title":"Two-stream convolutional networks for action recognition in videos","author":"simonyan","year":"0","journal-title":"NIPS"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2015.510"},{"key":"ref14","first-page":"5998","article-title":"Attention is all you need","author":"vaswani","year":"0","journal-title":"NIPS"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.acl-demos.14"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2017.83"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1162\/tacl_a_00207"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2017.502"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1109\/ICME51207.2021.9428369"},{"key":"ref4","first-page":"1984","article-title":"Excl: Extractive clip localization using natural language descriptions","author":"ghosh","year":"2019","journal-title":"NAACL-HLT"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2019.00630"},{"key":"ref6","first-page":"10807","article-title":"Local-global video-text inter-actions for temporal grounding","author":"mun","year":"0","journal-title":"CVPR"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.acl-main.585"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2020.3038993"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1145\/3331184.3331235"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-319-93417-4_38"},{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2017.563"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v34i07.6984"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2021.emnlp-main.327"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR46437.2021.00418"}],"event":{"name":"2022 IEEE International Conference on Multimedia and Expo (ICME)","start":{"date-parts":[[2022,7,18]]},"location":"Taipei, Taiwan","end":{"date-parts":[[2022,7,22]]}},"container-title":["2022 IEEE International Conference on Multimedia and Expo (ICME)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/9859562\/9858923\/09859855.pdf?arnumber=9859855","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2022,9,19]],"date-time":"2022-09-19T20:24:32Z","timestamp":1663619072000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9859855\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2022,7,18]]},"references-count":21,"URL":"https:\/\/doi.org\/10.1109\/icme52920.2022.9859855","relation":{},"subject":[],"published":{"date-parts":[[2022,7,18]]}}}