{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,11,15]],"date-time":"2025-11-15T08:53:11Z","timestamp":1763196791130,"version":"3.45.0"},"publisher-location":"Singapore","reference-count":26,"publisher":"Springer Nature Singapore","isbn-type":[{"value":"9789819533480","type":"print"},{"value":"9789819533497","type":"electronic"}],"license":[{"start":{"date-parts":[[2025,11,16]],"date-time":"2025-11-16T00:00:00Z","timestamp":1763251200000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2025,11,16]],"date-time":"2025-11-16T00:00:00Z","timestamp":1763251200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2026]]},"DOI":"10.1007\/978-981-95-3349-7_24","type":"book-chapter","created":{"date-parts":[[2025,11,15]],"date-time":"2025-11-15T08:50:07Z","timestamp":1763196607000},"page":"307-319","update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":0,"title":["LLM-Enhanced Translation for\u00a0Low-Resource Languages: Cross-Lingual Alignment and\u00a0Multi-domain Adaptation"],"prefix":"10.1007","author":[{"given":"Qifeng","family":"Su","sequence":"first","affiliation":[]},{"given":"Zhicong","family":"Wu","sequence":"additional","affiliation":[]},{"given":"Xiaodong","family":"Shi","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2025,11,16]]},"reference":[{"key":"24_CR1","doi-asserted-by":"publisher","unstructured":"Alves, D., e.a.: Steering large language models for machine translation with finetuning and in-context learning. In: Bouamor, H., Pino, J., Bali, K. (eds.) 
Findings of the Association for Computational Linguistics: EMNLP 2023. pp. 11127\u201311148. Association for Computational Linguistics, Singapore (2023). https:\/\/doi.org\/10.18653\/v1\/2023.findings-emnlp.744","DOI":"10.18653\/v1\/2023.findings-emnlp.744"},{"key":"24_CR2","doi-asserted-by":"publisher","unstructured":"Briakou, E., Luo, J., Cherry, C., Freitag, M.: Translating step-by-step: Decomposing the translation process for improved translation quality of long-form texts. In: Haddow, B., Kocmi, T., Koehn, P., Monz, C. (eds.) Proceedings of the Ninth Conference on Machine Translation, pp. 1301\u20131317. Association for Computational Linguistics, Miami, Florida, USA (2024). https:\/\/doi.org\/10.18653\/v1\/2024.wmt-1.123","DOI":"10.18653\/v1\/2024.wmt-1.123"},{"key":"24_CR3","doi-asserted-by":"crossref","unstructured":"Coleman, J., Krishnamachari, B., Iskarous, K., Rosales, R.: LLM-assisted rule based machine translation for low\/no-resource languages. arXiv preprint arXiv:2405.08997 (2024)","DOI":"10.18653\/v1\/2024.americasnlp-1.9"},{"key":"24_CR4","unstructured":"Cui, Y., Yang, Z., Yao, X.: Efficient and effective text encoding for chinese llama and alpaca (2024). https:\/\/arxiv.org\/abs\/2304.08177"},{"key":"24_CR5","unstructured":"Deshpande, T., Kowtal, N., Joshi, R.: Chain-of-translation prompting (cotr): A novel prompting technique for low resource languages (2024). https:\/\/arxiv.org\/abs\/2409.04512"},{"key":"24_CR6","unstructured":"Feng, Z., e.a.: Tear: Improving LLM-based machine translation with systematic self-refinement (2024). https:\/\/arxiv.org\/abs\/2402.16379"},{"key":"24_CR7","doi-asserted-by":"publisher","unstructured":"Guo, J., e.a.: A novel paradigm boosting translation capabilities of large language models. In: Duh, K., Gomez, H., Bethard, S. (eds.) Findings of the Association for Computational Linguistics: NAACL 2024. pp. 639\u2013649. Association for Computational Linguistics, Mexico City, Mexico (2024). 
https:\/\/doi.org\/10.18653\/v1\/2024.findings-naacl.42","DOI":"10.18653\/v1\/2024.findings-naacl.42"},{"key":"24_CR8","doi-asserted-by":"crossref","unstructured":"He, Z., e.a.: Exploring human-like translation strategy with large language models. Trans. Assoc. Comput. Linguis. 12, 229\u2013246 (2024)","DOI":"10.1162\/tacl_a_00642"},{"key":"24_CR9","doi-asserted-by":"crossref","unstructured":"Kocmi, T., e.a.: Findings of the wmt24 general machine translation shared task: the LLM era is here but MT is not solved yet. In: Proceedings of the Ninth Conference on Machine Translation, pp. 1\u201346 (2024)","DOI":"10.18653\/v1\/2024.wmt-1.1"},{"key":"24_CR10","doi-asserted-by":"publisher","unstructured":"Kuulmets, H.A., Purason, T., Luhtaru, A., Fishel, M.: Teaching llama a new language through cross-lingual knowledge transfer. In: Duh, K., Gomez, H., Bethard, S. (eds.) Findings of the Association for Computational Linguistics: NAACL 2024, pp. 3309\u20133325. Association for Computational Linguistics, Mexico City, Mexico (2024). https:\/\/doi.org\/10.18653\/v1\/2024.findings-naacl.210","DOI":"10.18653\/v1\/2024.findings-naacl.210"},{"key":"24_CR11","doi-asserted-by":"publisher","unstructured":"Li, J., Huang, S., Ching, A., Dai, X., Chen, J.: PreAlign: Boosting cross-lingual transfer by early establishment of multilingual alignment. In: Al-Onaizan, Y., Bansal, M., Chen, Y.N. (eds.) Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing, pp. 10246\u201310257. Association for Computational Linguistics, Miami, Florida, USA (2024). https:\/\/doi.org\/10.18653\/v1\/2024.emnlp-main.572","DOI":"10.18653\/v1\/2024.emnlp-main.572"},{"key":"24_CR12","unstructured":"Mahdizadeh\u00a0Sani, S., Sadeghi, P., Vu, T.T., Yaghoobzadeh, Y., Haffari, G.: Extending LLMs to new languages: A case study of llama and Persian adaptation. In: Rambow, O., Wanner, L., Apidianaki, M., Al-Khalifa, H., Eugenio, B.D., Schockaert, S. (eds.) 
Proceedings of the 31st International Conference on Computational Linguistics, pp. 8868\u20138884. Association for Computational Linguistics, Abu Dhabi, UAE (2025). https:\/\/aclanthology.org\/2025.coling-main.594\/"},{"key":"24_CR13","unstructured":"Ming, L., e.a.: Marco-LLM: Bridging languages via massive multilingual training for cross-lingual enhancement (2024). https:\/\/arxiv.org\/abs\/2412.04003"},{"key":"24_CR14","unstructured":"Rostami, P., Salemi, A., Dousti, M.J.: Persianmind: A cross-lingual Persian-english large language model (2024). https:\/\/arxiv.org\/abs\/2401.06466"},{"key":"24_CR15","unstructured":"shajiu: Yak-llama 2: Tibetan language model based on augmented pre-training (2024). git@github.com:Shajiu\/Yak-Llama-2.git"},{"key":"24_CR16","unstructured":"Touvron, H., e.a.: Llama 2: Open foundation and fine-tuned chat models (2023). https:\/\/arxiv.org\/abs\/2307.09288"},{"key":"24_CR17","unstructured":"Volk, M., Fischer, D.P., Fischer, L., Scheurer, P., Str\u00f6bel, P.B.: LLM-based machine translation and summarization for Latin. In: Sprugnoli, R., Passarotti, M. (eds.) Proceedings of the Third Workshop on Language Technologies for Historical and Ancient Languages (LT4HALA) @ LREC-COLING-2024, pp. 122\u2013128. ELRA and ICCL, Torino, Italia (2024). https:\/\/aclanthology.org\/2024.lt4hala-1.15\/"},{"key":"24_CR18","doi-asserted-by":"publisher","unstructured":"Wang, Y., e.a.: TasTe: Teaching large language models to translate through self-reflection. In: Ku, L.W., Martins, A., Srikumar, V. (eds.) Proceedings of the 62nd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers), pp. 6144\u20136158. Association for Computational Linguistics, Bangkok, Thailand (2024). 
https:\/\/doi.org\/10.18653\/v1\/2024.acl-long.333","DOI":"10.18653\/v1\/2024.acl-long.333"},{"key":"24_CR19","doi-asserted-by":"crossref","unstructured":"Wang, J., Meng, F., Liang, Y., Zhou, J.: Drt: Deep reasoning translation via long chain-of-thought (2025). https:\/\/arxiv.org\/abs\/2412.17498","DOI":"10.18653\/v1\/2025.findings-acl.351"},{"key":"24_CR20","unstructured":"Wenhao, Z., Yuan, S., Xiaobing, Z.: TiLamb: (TiLamb: A Tibetan large language model based on incremental pre-training). In: Sun, M., Liang, J., Han, X., Liu, Z., He, Y. (eds.) Proceedings of the 23rd Chinese National Conference on Computational Linguistics (Volume 1: Main Conference), pp. 254\u2013267. Chinese Information Processing Society of China, Taiyuan, China (2024). https:\/\/aclanthology.org\/2024.ccl-1.19\/"},{"key":"24_CR21","doi-asserted-by":"publisher","unstructured":"Wu, Q., Nagata, M., Miao, Z., Tsuruoka, Y.: Word alignment as preference for machine translation. In: Al-Onaizan, Y., Bansal, M., Chen, Y.N. (eds.) Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing, pp. 3223\u20133239. Association for Computational Linguistics, Miami, Florida, USA (2024). https:\/\/doi.org\/10.18653\/v1\/2024.emnlp-main.188","DOI":"10.18653\/v1\/2024.emnlp-main.188"},{"key":"24_CR22","unstructured":"Xu, H., e.a.: Contrastive preference optimization: pushing the boundaries of llm performance in machine translation. In: Proceedings of the 41st International Conference on Machine Learning. ICML\u201924, JMLR.org (2024)"},{"key":"24_CR23","unstructured":"Xu, H., Kim, Y., Sharaf, A., Awadalla, H.: A paradigm shift in machine translation: Boosting translation performance of large language models. 
arXiv preprint arXiv:2309.11674 (2023)"},{"key":"24_CR24","doi-asserted-by":"publisher","unstructured":"Xu, J., Zhou, H., Gan, C., Zheng, Z., Li, L.: Vocabulary learning via optimal transport for neural machine translation. In: Zong, C., Xia, F., Li, W., Navigli, R. (eds.) Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing (Volume 1: Long Papers), pp. 7361\u20137373. Association for Computational Linguistics, Online (2021). https:\/\/doi.org\/10.18653\/v1\/2021.acl-long.571","DOI":"10.18653\/v1\/2021.acl-long.571"},{"key":"24_CR25","unstructured":"Zheng, J., e.a.: Fine-tuning large language models for domain-specific machine translation (2024). https:\/\/arxiv.org\/abs\/2402.15061"},{"key":"24_CR26","doi-asserted-by":"publisher","unstructured":"Zhu, W., e.a.: Multilingual machine translation with large language models: Empirical results and analysis. In: Duh, K., Gomez, H., Bethard, S. (eds.) Findings of the Association for Computational Linguistics: NAACL 2024, pp. 2765\u20132781. Association for Computational Linguistics, Mexico City, Mexico (2024). 
https:\/\/doi.org\/10.18653\/v1\/2024.findings-naacl.176","DOI":"10.18653\/v1\/2024.findings-naacl.176"}],"container-title":["Lecture Notes in Computer Science","Natural Language Processing and Chinese Computing"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/978-981-95-3349-7_24","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,11,15]],"date-time":"2025-11-15T08:50:11Z","timestamp":1763196611000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/978-981-95-3349-7_24"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,11,16]]},"ISBN":["9789819533480","9789819533497"],"references-count":26,"URL":"https:\/\/doi.org\/10.1007\/978-981-95-3349-7_24","relation":{},"ISSN":["0302-9743","1611-3349"],"issn-type":[{"value":"0302-9743","type":"print"},{"value":"1611-3349","type":"electronic"}],"subject":[],"published":{"date-parts":[[2025,11,16]]},"assertion":[{"value":"16 November 2025","order":1,"name":"first_online","label":"First Online","group":{"name":"ChapterHistory","label":"Chapter History"}},{"value":"NLPCC","order":1,"name":"conference_acronym","label":"Conference Acronym","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"CCF International Conference on Natural Language Processing and Chinese Computing","order":2,"name":"conference_name","label":"Conference Name","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Urumqi","order":3,"name":"conference_city","label":"Conference City","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"China","order":4,"name":"conference_country","label":"Conference Country","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"2025","order":5,"name":"conference_year","label":"Conference Year","group":{"name":"ConferenceInfo","label":"Conference 
Information"}},{"value":"7 August 2025","order":7,"name":"conference_start_date","label":"Conference Start Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"9 August 2025","order":8,"name":"conference_end_date","label":"Conference End Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"14","order":9,"name":"conference_number","label":"Conference Number","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"nlpcc2025","order":10,"name":"conference_id","label":"Conference ID","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"http:\/\/tcci.ccf.org.cn\/conference\/2025\/index.php","order":11,"name":"conference_url","label":"Conference URL","group":{"name":"ConferenceInfo","label":"Conference Information"}}]}}