{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,2,20]],"date-time":"2026-02-20T22:05:54Z","timestamp":1771625154739,"version":"3.50.1"},"reference-count":17,"publisher":"IEEE","license":[{"start":{"date-parts":[[2024,11,5]],"date-time":"2024-11-05T00:00:00Z","timestamp":1730764800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,11,5]],"date-time":"2024-11-05T00:00:00Z","timestamp":1730764800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2024,11,5]]},"DOI":"10.1109\/kse63888.2024.11063566","type":"proceedings-article","created":{"date-parts":[[2025,7,11]],"date-time":"2025-07-11T17:41:44Z","timestamp":1752255704000},"page":"01-06","source":"Crossref","is-referenced-by-count":0,"title":["Adapting Large Language Models to Vietnamese Law: Pretrained LLM Refinement vs Retrieval Augmented Generation"],"prefix":"10.1109","author":[{"given":"Nguyen P.","family":"Nguyen","sequence":"first","affiliation":[{"name":"Ton Duc Thang University,Natural Language Processing and Knowledge Discovery Laboratory, Faculty of Information Technology,Ho Chi Minh City,Vietnam"}]},{"given":"Thang V.Q.","family":"Le","sequence":"additional","affiliation":[{"name":"Ton Duc Thang University,Natural Language Processing and Knowledge Discovery Laboratory, Faculty of Information Technology,Ho Chi Minh City,Vietnam"}]},{"given":"Anh-Cuong","family":"Le","sequence":"additional","affiliation":[{"name":"Ton Duc Thang University,Natural Language Processing and Knowledge Discovery Laboratory, Faculty of Information Technology,Ho Chi Minh City,Vietnam"}]},{"given":"Viet-Ha","family":"Nguyen","sequence":"additional","affiliation":[{"name":"Institute for Artificial Intelligence, VNU University of Engineering and Technology,Hanoi,Vietnam"}]},{"given":"Viet-Cuong","family":"Nguyen","sequence":"additional","affiliation":[{"name":"Intelligent Integration Co., Ltd (INT2),Vietnam"}]}],"member":"263","reference":[{"key":"ref1","article-title":"Gpt-4 technical report","author":"Open","year":"2024"},{"key":"ref2","article-title":"Mistral 7b","author":"Jiang","year":"2023"},{"key":"ref3","volume-title":"Qwen2 technical report","author":"Yang","year":"2024"},{"key":"ref4","article-title":"Language models are few-shot learners","author":"Brown","year":"2020"},{"key":"ref5","article-title":"Palm: Scaling language modeling with pathways","author":"Chowdhery","year":"2022"},{"key":"ref6","article-title":"The falcon series of open language models","author":"Almazrouei","year":"2023"},{"key":"ref7","article-title":"Textbooks are all you need","author":"Gunasekar","year":"2023"},{"key":"ref8","article-title":"Phi-3 technical report: A highly capable language model locally on your phone","author":"Abdin","year":"2024"},{"key":"ref9","article-title":"Gemma: Open models based on gemini research and technology","author":"Team","year":"2024"},{"key":"ref10","article-title":"Qwen technical report","author":"Bai","year":"2023"},{"key":"ref11","article-title":"Saullm-7b: A pioneering large language model for law","author":"Colombo","year":"2024"},{"key":"ref12","article-title":"Chatlaw: A multi-agent collaborative legal assistant with knowledge graph enhanced mixture-of-experts large language model","author":"Cui","year":"2024"},{"key":"ref13","article-title":"Lawgpt: A chinese legal knowledge-enhanced large language model","author":"Zhou","year":"2024"},{"key":"ref14","article-title":"Lawyer llama technical report","author":"Huang","year":"2023"},{"key":"ref15","article-title":"Adapting large language models via reading comprehension","author":"Cheng","year":"2024"},{"key":"ref16","doi-asserted-by":"crossref","DOI":"10.18653\/v1\/2024.emnlp-main.148","article-title":"Instruction pre-training: Language models are supervised multitask learners","author":"Cheng","year":"2024"},{"key":"ref17","article-title":"Lora: Low-rank adaptation of large language models","author":"Hu","year":"2021"}],"event":{"name":"2024 16th International Conference on Knowledge and System Engineering (KSE)","location":"Kuala Lumpur, Malaysia","start":{"date-parts":[[2024,11,5]]},"end":{"date-parts":[[2024,11,7]]}},"container-title":["2024 16th International Conference on Knowledge and System Engineering (KSE)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/11063473\/11063476\/11063566.pdf?arnumber=11063566","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,2,20]],"date-time":"2026-02-20T21:11:11Z","timestamp":1771621871000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11063566\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,11,5]]},"references-count":17,"URL":"https:\/\/doi.org\/10.1109\/kse63888.2024.11063566","relation":{},"subject":[],"published":{"date-parts":[[2024,11,5]]}}}