{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,2,20]],"date-time":"2026-02-20T22:06:11Z","timestamp":1771625171353,"version":"3.50.1"},"reference-count":21,"publisher":"IEEE","license":[{"start":{"date-parts":[[2024,11,5]],"date-time":"2024-11-05T00:00:00Z","timestamp":1730764800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,11,5]],"date-time":"2024-11-05T00:00:00Z","timestamp":1730764800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2024,11,5]]},"DOI":"10.1109\/kse63888.2024.11063523","type":"proceedings-article","created":{"date-parts":[[2025,7,11]],"date-time":"2025-07-11T17:41:44Z","timestamp":1752255704000},"page":"327-332","source":"Crossref","is-referenced-by-count":0,"title":["Enhancing Reading Comprehension of Vietnamese LLMs with Synthetic Data"],"prefix":"10.1109","author":[{"given":"Thang V.Q.","family":"Le","sequence":"first","affiliation":[{"name":"Ton Duc Thang University,Natural Language Processing and Knowledge Discovery Laboratory, Faculty of Information Technology,Ho Chi Minh City,Vietnam"}]},{"given":"Nguyen P.","family":"Nguyen","sequence":"additional","affiliation":[{"name":"Ton Duc Thang University,Natural Language Processing and Knowledge Discovery Laboratory, Faculty of Information Technology,Ho Chi Minh City,Vietnam"}]},{"given":"Trong-Chi","family":"Duong","sequence":"additional","affiliation":[{"name":"Ton Duc Thang University,Natural Language Processing and Knowledge Discovery Laboratory, Faculty of Information Technology,Ho Chi Minh City,Vietnam"}]},{"given":"Anh-Cuong","family":"Le","sequence":"additional","affiliation":[{"name":"Ton Duc Thang University,Natural Language Processing and Knowledge Discovery Laboratory, Faculty of Information Technology,Ho Chi Minh City,Vietnam"}]},{"given":"Viet-Cuong","family":"Nguyen","sequence":"additional","affiliation":[{"name":"Intelligent Integration Co. Ltd (INT2),Vietnam"}]},{"given":"Viet-Ha","family":"Nguyen","sequence":"additional","affiliation":[{"name":"Institute for Artificial Intelligence, VNU University of Engineering and Technology,Hanoi,Vietnam"}]}],"member":"263","reference":[{"key":"ref1","volume-title":"Language Models are Unsupervised Multitask Learners","author":"Radford","year":"2019"},{"key":"ref2","article-title":"Scaling Laws for Neural Language Models","author":"Kaplan","year":"2020","journal-title":"arXiv preprint arXiv"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.clinicalnlp-1.17"},{"key":"ref4","article-title":"Language Models are Few-Shot Learners","author":"Brown","year":"2020","journal-title":"arXiv preprint arXiv"},{"key":"ref5","article-title":"Chain-of-Thought Prompting Elicits Reasoning in Large Language Models","author":"Wei","year":"2023","journal-title":"arXiv preprint arXiv"},{"key":"ref6","article-title":"Legal Prompting: Teaching a Language Model to Think Like a Lawyer","author":"Yu","year":"2022","journal-title":"arXiv preprint arXiv"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1145\/3594536.3595170"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.2139\/ssrn.4476325"},{"key":"ref9","article-title":"Efficient and Effective Text Encoding for Chinese LLaMA and Alpaca","author":"Cui","year":"2024","journal-title":"arXiv preprint arXiv"},{"key":"ref10","article-title":"Lawyer LLaMA Technical Report","author":"Huang","year":"2023","journal-title":"arXiv preprint arXiv"},{"key":"ref11","article-title":"Chatlaw: A Multi-Agent Collaborative Legal Assistant with Knowledge Graph Enhanced Mixture-of-Experts Large Language Model","author":"Cui","year":"2024","journal-title":"arXiv preprint arXiv"},{"key":"ref12","article-title":"LoRA: Low-Rank Adaptation of Large Language Models","author":"Hu","year":"2021","journal-title":"arXiv preprint arXiv"},{"key":"ref13","article-title":"BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding","author":"Devlin","year":"2019","journal-title":"arXiv preprint arXiv"},{"key":"ref14","article-title":"SlimPajama-DC: Understanding Data Combinations for LLM Training","author":"Shen","year":"2024","journal-title":"arXiv preprint arXiv"},{"key":"ref15","article-title":"SaulLM-7B: A pioneering Large Language Model for Law","author":"Colombo","year":"2024","journal-title":"arXiv preprint arXiv"},{"key":"ref16","article-title":"Adapting Large Language Models via Reading Comprehension","author":"Cheng","year":"2024","journal-title":"arXiv preprint arXiv"},{"key":"ref17","article-title":"DISC-LawLLM: Fine-tuning Large Language Models for Intelligent Legal Services","author":"Yue","year":"2023","journal-title":"arXiv preprint arXiv"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2024.acl-long.296"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2024.emnlp-main.148"},{"key":"ref20","article-title":"LoRA Learns Less and Forgets Less","author":"Biderman","year":"2024","journal-title":"arXiv preprint arXiv"},{"key":"ref21","article-title":"ViLLM-Eval: A Comprehensive Evaluation Suite for Vietnamese Large Language Models","author":"Nguyen","year":"2024","journal-title":"arXiv preprint arXiv"}],"event":{"name":"2024 16th International Conference on Knowledge and System Engineering (KSE)","location":"Kuala Lumpur, Malaysia","start":{"date-parts":[[2024,11,5]]},"end":{"date-parts":[[2024,11,7]]}},"container-title":["2024 16th International Conference on Knowledge and System Engineering (KSE)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/11063473\/11063476\/11063523.pdf?arnumber=11063523","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,2,20]],"date-time":"2026-02-20T21:11:11Z","timestamp":1771621871000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11063523\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,11,5]]},"references-count":21,"URL":"https:\/\/doi.org\/10.1109\/kse63888.2024.11063523","relation":{},"subject":[],"published":{"date-parts":[[2024,11,5]]}}}