{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,11,15]],"date-time":"2025-11-15T07:28:30Z","timestamp":1763191710428,"version":"3.45.0"},"reference-count":45,"publisher":"IEEE","license":[{"start":{"date-parts":[[2025,6,30]],"date-time":"2025-06-30T00:00:00Z","timestamp":1751241600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,6,30]],"date-time":"2025-06-30T00:00:00Z","timestamp":1751241600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2025,6,30]]},"DOI":"10.1109\/ijcnn64981.2025.11227991","type":"proceedings-article","created":{"date-parts":[[2025,11,14]],"date-time":"2025-11-14T18:46:15Z","timestamp":1763145975000},"page":"1-9","source":"Crossref","is-referenced-by-count":0,"title":["Bridging the Language Gap: Enhancing Multilingual Prompt-Based Code Generation in LLMs via Zero-Shot Cross-Lingual Transfer"],"prefix":"10.1109","author":[{"given":"Mingda","family":"Li","sequence":"first","affiliation":[{"name":"Yale University,Department of Statistics and Data Science,New Haven,USA"}]},{"given":"Abhijit","family":"Mishra","sequence":"additional","affiliation":[{"name":"School of Information, University of Texas at Austin,Austin,USA"}]},{"given":"Utkarsh","family":"Mujumdar","sequence":"additional","affiliation":[{"name":"School of Information, University of Texas at Austin,Austin,USA"}]}],"member":"263","reference":[{"article-title":"Code llama: open foundation models for code","year":"2023","author":"Roziere","key":"ref1"},{"article-title":"A survey of large language models","year":"2023","author":"Zhao","key":"ref2"},{"key":"ref3","first-page":"10764","article-title":"PAL: program-aided language models","volume-title":"Int. Conf. 
Machine Learning","author":"Gao"},{"article-title":"Program synthesis with large language models","year":"2021","author":"Austin","key":"ref4"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1145\/3558489.3559072"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.bigscience-1.3"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i14.17505"},{"article-title":"Language models are multilingual chain-of-thought reasoners","year":"2022","author":"Shi","key":"ref8"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.emnlp-main.163"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.eacl-main.180"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1162\/tacl_a_00288"},{"article-title":"Codegemma: open code models based on gemma","year":"2024","author":"Team","key":"ref12"},{"article-title":"Mistral 7b","year":"2023","author":"Jiang","key":"ref13"},{"year":"2023","key":"ref14","article-title":"GPT-4 technical report"},{"article-title":"Llama 2: open foundation and fine-tuned chat models","year":"2023","author":"Touvron","key":"ref15"},{"article-title":"Gemma: open models based on gemini research and technology","year":"2024","author":"Team","key":"ref16"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.findings-emnlp.878"},{"article-title":"Bigscience: a case study in the social construction of a multilingual large language model","year":"2022","author":"Akiki","key":"ref18"},{"article-title":"Language models are multilingual chain-of-thought 
reasoners","year":"2022","author":"Shi","key":"ref19"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1145\/3545945.3569759"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1145\/3501385.3543957"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.emnlp-main.258"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.coling-industry.1"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.findings-emnlp.826"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.naacl-main.255"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2021.acl-long.264"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2021.emnlp-main.672"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.emnlp-main.790"},{"article-title":"Bridging code semantic and llms: semantic chain-of-thought prompting for code generation","year":"2023","author":"Ma","key":"ref29"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/p16-1009"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/W18-2703"},{"article-title":"Self-alignment with instruction backtranslation","year":"2023","author":"Li","key":"ref32"},{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.findings-acl.676"},{"article-title":"BLOOM: a 176b-parameter open-access multilingual language model","year":"2023","author":"Scao","key":"ref34"},{"article-title":"Aurora-m: the first open source multilingual language model red-teamed according to the u.s. 
executive order","year":"2024","author":"Nakamura","key":"ref35"},{"key":"ref36","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2024.emnlp-main.457"},{"key":"ref37","article-title":"Visual instruction tuning","volume":"36","author":"Liu","year":"2024","journal-title":"Advances in Neural Information Processing Systems"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP48485.2024.10447605"},{"article-title":"Paligemma: a versatile 3b vlm for transfer","year":"2024","author":"Beyer","key":"ref39"},{"article-title":"Monolingual or multilingual instruction tuning: which makes a better alpaca","year":"2024","author":"Chen","key":"ref40"},{"key":"ref41","doi-asserted-by":"publisher","DOI":"10.3115\/1073083.1073135"},{"article-title":"Lora: low-rank adaptation of large language models","year":"2021","author":"Hu","key":"ref42"},{"key":"ref43","doi-asserted-by":"crossref","DOI":"10.18653\/v1\/2022.findings-emnlp.154","article-title":"Bitext mining using distilled sentence representations for low-resource languages","author":"Heffernan","year":"2022"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.findings-emnlp.89"},{"article-title":"HumanEval-XL: a multilingual code generation benchmark for cross-lingual natural language generalization","year":"2024","author":"Peng","key":"ref45"}],"event":{"name":"2025 International Joint Conference on Neural Networks (IJCNN)","start":{"date-parts":[[2025,6,30]]},"location":"Rome, Italy","end":{"date-parts":[[2025,7,5]]}},"container-title":["2025 International Joint Conference on Neural Networks 
(IJCNN)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/11227166\/11227148\/11227991.pdf?arnumber=11227991","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,11,15]],"date-time":"2025-11-15T07:26:04Z","timestamp":1763191564000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11227991\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,6,30]]},"references-count":45,"URL":"https:\/\/doi.org\/10.1109\/ijcnn64981.2025.11227991","relation":{},"subject":[],"published":{"date-parts":[[2025,6,30]]}}}