{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,6,18]],"date-time":"2025-06-18T04:17:58Z","timestamp":1750220278830,"version":"3.41.0"},"publisher-location":"New York, NY, USA","reference-count":1,"publisher":"ACM","license":[{"start":{"date-parts":[[2022,3,3]],"date-time":"2022-03-03T00:00:00Z","timestamp":1646265600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2022,3,3]]},"DOI":"10.1145\/3478432.3499048","type":"proceedings-article","created":{"date-parts":[[2022,2,23]],"date-time":"2022-02-23T01:00:48Z","timestamp":1645578048000},"page":"1077-1077","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":1,"title":["Using Deep Learning to Localize Errors in Student Code Submissions"],"prefix":"10.1145","author":[{"given":"Shion","family":"Fujimori","sequence":"first","affiliation":[{"name":"University of Toronto, Mississauga, ON, Canada"}]},{"given":"Mohamed","family":"Harmanani","sequence":"additional","affiliation":[{"name":"University of Toronto, Mississauga, ON, Canada"}]},{"given":"Owais","family":"Siddiqui","sequence":"additional","affiliation":[{"name":"University of Toronto Mississauga, Mississauga, ON, Canada"}]},{"given":"Lisa","family":"Zhang","sequence":"additional","affiliation":[{"name":"University of Toronto Mississauga, Mississauga, ON, Canada"}]}],"member":"320","published-online":{"date-parts":[[2022,3,3]]},"reference":[{"key":"e_1_3_2_1_1_1","volume-title":"CodeBERT: A Pre-Trained Model for Programming and Natural Languages. In Findings of the Association for Computational Linguistics: EMNLP 2020 . Association for Computational Linguistics.","author":"Feng Zhangyin","year":"2020","unstructured":"Zhangyin Feng , Daya Guo , Duyu Tang , Nan Duan , Xiaocheng Feng , Ming Gong , Linjun Shou , Bing Qin , Ting Liu , Daxin Jiang , and Ming Zhou . 2020 . CodeBERT: A Pre-Trained Model for Programming and Natural Languages. In Findings of the Association for Computational Linguistics: EMNLP 2020 . Association for Computational Linguistics. Zhangyin Feng, Daya Guo, Duyu Tang, Nan Duan, Xiaocheng Feng, Ming Gong, Linjun Shou, Bing Qin, Ting Liu, Daxin Jiang, and Ming Zhou. 2020. CodeBERT: A Pre-Trained Model for Programming and Natural Languages. In Findings of the Association for Computational Linguistics: EMNLP 2020 . Association for Computational Linguistics."}],"event":{"name":"SIGCSE 2022: The 53rd ACM Technical Symposium on Computer Science Education","sponsor":["SIGCSE ACM Special Interest Group on Computer Science Education"],"location":"Providence RI USA","acronym":"SIGCSE 2022"},"container-title":["Proceedings of the 53rd ACM Technical Symposium on Computer Science Education V. 
2"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3478432.3499048","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3478432.3499048","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,17]],"date-time":"2025-06-17T19:31:15Z","timestamp":1750188675000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3478432.3499048"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2022,3,3]]},"references-count":1,"alternative-id":["10.1145\/3478432.3499048","10.1145\/3478432"],"URL":"https:\/\/doi.org\/10.1145\/3478432.3499048","relation":{},"subject":[],"published":{"date-parts":[[2022,3,3]]},"assertion":[{"value":"2022-03-03","order":2,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}