{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,11,21]],"date-time":"2025-11-21T11:32:55Z","timestamp":1763724775632,"version":"3.35.0"},"reference-count":93,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"2","license":[{"start":{"date-parts":[[2025,2,1]],"date-time":"2025-02-01T00:00:00Z","timestamp":1738368000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2025,2,1]],"date-time":"2025-02-01T00:00:00Z","timestamp":1738368000000},"content-version":"am","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2025,2,1]],"date-time":"2025-02-01T00:00:00Z","timestamp":1738368000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,2,1]],"date-time":"2025-02-01T00:00:00Z","timestamp":1738368000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"name":"National Science Foundation","award":["2340949","2419880"],"award-info":[{"award-number":["2340949","2419880"]}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Circuits Syst. 
I"],"published-print":{"date-parts":[[2025,2]]},"DOI":"10.1109\/tcsi.2024.3487486","type":"journal-article","created":{"date-parts":[[2024,11,26]],"date-time":"2024-11-26T19:00:49Z","timestamp":1732647649000},"page":"623-636","source":"Crossref","is-referenced-by-count":6,"title":["A Generalized Hardware Debugging Approach for Large Language Models Semi-Synthetic Datasets"],"prefix":"10.1109","volume":"72","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-9623-6522","authenticated-orcid":false,"given":"Weimin","family":"Fu","sequence":"first","affiliation":[{"name":"Mike Wiegers Department of Electrical and Computer Engineering, Kansas State University, Manhattan, KS, USA"}]},{"ORCID":"https:\/\/orcid.org\/0009-0004-6343-6941","authenticated-orcid":false,"given":"Shijie","family":"Li","sequence":"additional","affiliation":[{"name":"School of Cyber Science and Technology, University of Science and Technology of China, Hefei, Anhui, China"}]},{"ORCID":"https:\/\/orcid.org\/0009-0000-1874-4567","authenticated-orcid":false,"given":"Yifang","family":"Zhao","sequence":"additional","affiliation":[{"name":"School of Cyber Science and Technology, University of Science and Technology of China, Hefei, Anhui, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-1027-6708","authenticated-orcid":false,"given":"Kaichen","family":"Yang","sequence":"additional","affiliation":[{"name":"Department of Electrical and Computer Engineering, Michigan Technological University, Houghton, MI, USA"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-0482-5435","authenticated-orcid":false,"given":"Xuan","family":"Zhang","sequence":"additional","affiliation":[{"name":"Department of Electrical and Computer Engineering, Northeastern University, Boston, MA, USA"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-8791-0597","authenticated-orcid":false,"given":"Yier","family":"Jin","sequence":"additional","affiliation":[{"name":"School of Cyber Science and Technology, University of Science and Technology of China, 
Hefei, Anhui, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-9896-9407","authenticated-orcid":false,"given":"Xiaolong","family":"Guo","sequence":"additional","affiliation":[{"name":"Mike Wiegers Department of Electrical and Computer Engineering, Kansas State University, Manhattan, KS, USA"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1145\/3620666.3651346"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1109\/TCAD.2023.3341750"},{"key":"ref3","first-page":"1877","article-title":"Language models are few-shot learners","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"33","author":"Brown"},{"key":"ref4","article-title":"DoctorGLM: Fine-tuning your Chinese doctor is not a herculean task","author":"Xiong","year":"2023","journal-title":"arXiv:2304.01097"},{"key":"ref5","article-title":"DISC-LawLLM: Fine-tuning large language models for intelligent legal services","author":"Yue","year":"2023","journal-title":"arXiv:2309.11325"},{"key":"ref6","article-title":"Evaluating large language models trained on code","author":"Chen","year":"2021","journal-title":"arXiv:2107.03374"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2024.findings-acl.49"},{"volume-title":"Claude 3.5 Sonnet","year":"2024","key":"ref8"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1109\/ICCAD57390.2023.10323812"},{"key":"ref10","article-title":"OriGen: Enhancing RTL code generation with code-to-code augmentation and self-reflection","author":"Cui","year":"2024","journal-title":"arXiv:2407.16237"},{"volume-title":"Introducing the Next Generation of Claude","year":"2024","key":"ref11"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.emnlp-main.225"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1109\/ASP-DAC58780.2024.10473927"},{"key":"ref14","article-title":"Training compute-optimal large language 
models","author":"Hoffmann","year":"2022","journal-title":"arXiv:2203.15556"},{"key":"ref15","article-title":"ChipNeMo: Domain-adapted LLMs for chip design","author":"Liu","year":"2023","journal-title":"arXiv:2311.00176"},{"key":"ref16","article-title":"RTLCoder: Fully open-source and efficient LLM-assisted RTL code generation technique","author":"Liu","year":"2023","journal-title":"arXiv:2312.08617"},{"issue":"2","key":"ref17","first-page":"139","article-title":"The five W\u2019s: An old tool for the new task of task analysis","volume":"43","author":"Hart","year":"1996","journal-title":"Tech. Commun."},{"key":"ref18","article-title":"Chatbot arena: An open platform for evaluating llms by human preference","author":"Chiang","year":"2024","journal-title":"arXiv:2403.04132"},{"key":"ref19","article-title":"GPT-4 technical report","volume-title":"arXiv:2303.08774","author":"Achiam","year":"2023"},{"key":"ref20","article-title":"LLM-aided testbench generation and bug detection for finite-state machines","author":"Bhandari","year":"2024","journal-title":"arXiv:2406.17132"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1002\/j.1538-7305.1948.tb01338.x"},{"key":"ref22","article-title":"CodeGen: An open large language model for code with multi-turn program synthesis","author":"Nijkamp","year":"2022","journal-title":"arXiv:2203.13474"},{"key":"ref23","article-title":"Code llama: Open foundation models for code","author":"Rozi\u00e8re","year":"2023","journal-title":"arXiv:2308.12950"},{"key":"ref24","article-title":"StarCoder: May the source be with you!","author":"Li","year":"2023","journal-title":"arXiv:2305.06161"},{"key":"ref25","article-title":"WizardCoder: Empowering code large language models with evol-instruct","author":"Luo","year":"2023","journal-title":"arXiv:2306.08568"},{"key":"ref26","article-title":"OctoPack: Instruction tuning code large language 
models","author":"Muennighoff","year":"2023","journal-title":"arXiv:2308.07124"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.1109\/AsianHOST59942.2023.10409307"},{"key":"ref28","article-title":"RLTF: Reinforcement learning from unit test feedback","author":"Liu","year":"2023","journal-title":"arXiv:2307.04349"},{"key":"ref29","article-title":"PanGu-coder2: Boosting large language models for code with ranking feedback","author":"Shen","year":"2023","journal-title":"arXiv:2307.14936"},{"key":"ref30","article-title":"CodeT: Code generation with generated tests","author":"Chen","year":"2022","journal-title":"arXiv:2207.10397"},{"key":"ref31","first-page":"26106","article-title":"LEVER: Learning to verify language-to-code generation with execution","volume-title":"Proc. 40th Int. Conf. Mach. Learn.","author":"Ni"},{"key":"ref32","article-title":"LLM-assisted generation of hardware assertions","author":"Kande","year":"2023","journal-title":"arXiv:2306.14027"},{"key":"ref33","article-title":"Fixing hardware security bugs with large language models","author":"Ahmad","year":"2023","journal-title":"arXiv:2302.01215"},{"key":"ref34","article-title":"AssertLLM: Generating and evaluating hardware verification assertions from design specifications via multi-LLMs","author":"Fang","year":"2024","journal-title":"arXiv:2402.00386"},{"key":"ref35","article-title":"DIVAS: An LLM-based end-to-end framework for SoC security analysis and policy-based protection","author":"Paria","year":"2023","journal-title":"arXiv:2308.06932"},{"key":"ref36","article-title":"BigCodeBench: Benchmarking code generation with diverse function calls and complex instructions","author":"Zhuo","year":"2024","journal-title":"arXiv:2406.15877"},{"article-title":"SWE-bench: Can language models resolve real-world Github issues?","volume-title":"Proc. 12th Int. Conf. Learn. 
Represent","author":"Jimenez","key":"ref37"},{"key":"ref38","article-title":"Compression represents intelligence linearly","author":"Huang","year":"2024","journal-title":"arXiv:2404.09937"},{"key":"ref39","article-title":"Exploring the limits of transfer learning with a unified text-to-text transformer","author":"Raffel","year":"2019","journal-title":"arXiv:1910.10683"},{"volume-title":"RedPajama: An open source recipe to Reproduce Llama Training Dataset","year":"2023","key":"ref40"},{"key":"ref41","article-title":"Tinyllama: An open-source small language model","author":"Zhang","year":"2024","journal-title":"arXiv:2401.02385"},{"key":"ref42","article-title":"Textbooks are all you need","author":"Gunasekar","year":"2023","journal-title":"arXiv:2306.11644"},{"key":"ref43","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR52733.2024.02484"},{"volume-title":"Stanford Alpaca: An Instruction-Following Llama Model","year":"2023","author":"Taori","key":"ref44"},{"key":"ref45","article-title":"The curse of recursion: Training on generated data makes models forget","volume":"abs\/2305.17493","author":"Shumailov","year":"2023","journal-title":"CoRR"},{"volume-title":"OpenAI Has Hired an Army of Contractors to Make Basic Coding Obsolete","year":"2024","author":"Mendoza","key":"ref46"},{"volume-title":"No Robots","year":"2023","author":"Rajani","key":"ref47"},{"key":"ref48","first-page":"1","article-title":"Openassistant conversations\u2014Democratizing large language model alignment","volume-title":"Proc. Adv. Neural Inf. Process. 
Syst.","volume":"36","author":"K\u00f6pf"},{"key":"ref49","article-title":"Self-instruct: Aligning language models with self-generated instructions","author":"Wang","year":"2022","journal-title":"arXiv:2212.10560"},{"key":"ref50","article-title":"Instruction tuning with GPT-4","author":"Peng","year":"2023","journal-title":"arXiv:2304.03277"},{"key":"ref51","doi-asserted-by":"publisher","DOI":"10.1145\/3643681"},{"key":"ref52","first-page":"1","article-title":"Fast and wrong: The case for formally specifying hardware with LLMS","volume-title":"Proc. Int. Conf. Architectural Support Program. Lang. Operating Syst. (ASPLOS)","author":"Srikumar"},{"key":"ref53","article-title":"Training a helpful and harmless assistant with reinforcement learning from human feedback","author":"Bai","year":"2022","journal-title":"arXiv:2204.05862"},{"key":"ref54","article-title":"WebGPT: Browser-assisted question-answering with human feedback","author":"Nakano","year":"2021","journal-title":"arXiv:2112.09332"},{"key":"ref55","article-title":"AutoChip: Automating HDL generation using LLM feedback","author":"Thakur","year":"2023","journal-title":"arXiv:2311.04887"},{"key":"ref56","article-title":"Leveraging reinforcement learning and large language models for code optimization","author":"Duan","year":"2023","journal-title":"arXiv:2312.05657"},{"key":"ref57","article-title":"SynCode: LLM generation with grammar augmentation","author":"Ugare","year":"2024","journal-title":"arXiv:2403.01632"},{"key":"ref58","article-title":"LoRA: Low-rank adaptation of large language models","author":"Hu","year":"2021","journal-title":"arXiv:2106.09685"},{"key":"ref59","article-title":"LLaMA-adapter: Efficient fine-tuning of language models with zero-init attention","author":"Zhang","year":"2023","journal-title":"arXiv:2303.16199"},{"volume-title":"Version Control With 
Git","year":"2022","author":"Ponuthorai","key":"ref60"},{"key":"ref61","doi-asserted-by":"publisher","DOI":"10.1093\/owc\/9780199538607.001.0001"},{"key":"ref62","doi-asserted-by":"publisher","DOI":"10.1080\/10584609.1993.9962963"},{"key":"ref63","doi-asserted-by":"publisher","DOI":"10.1109\/PATMOS.2017.8106976"},{"key":"ref64","first-page":"10","article-title":"Titan: Enabling a transparent silicon root of trust for cloud","volume-title":"Proc. Hot Chips: Symp. High Perform. Chips","volume":"194","author":"Johnson"},{"key":"ref65","doi-asserted-by":"publisher","DOI":"10.1109\/S3S.2018.8640145"},{"key":"ref66","doi-asserted-by":"publisher","DOI":"10.1109\/TVLSI.2017.2654506"},{"key":"ref67","doi-asserted-by":"publisher","DOI":"10.1109\/TVLSI.2019.2950087"},{"key":"ref68","doi-asserted-by":"publisher","DOI":"10.1145\/2872362.2872414"},{"key":"ref69","doi-asserted-by":"publisher","DOI":"10.23919\/FPL.2017.8056766"},{"key":"ref70","doi-asserted-by":"publisher","DOI":"10.1109\/TVLSI.2019.2926114"},{"key":"ref71","doi-asserted-by":"publisher","DOI":"10.1109\/hotchips.2015.7477460"},{"key":"ref72","article-title":"Think you have solved question answering? 
Try ARC, the AI2 reasoning challenge","author":"Clark","year":"2018","journal-title":"arXiv:1803.05457"},{"key":"ref73","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P19-1472"},{"key":"ref74","article-title":"Measuring massive multitask language understanding","author":"Hendrycks","year":"2020","journal-title":"arXiv:2009.03300"},{"key":"ref75","article-title":"TruthfulQA: Measuring how models mimic human falsehoods","author":"Lin","year":"2021","journal-title":"arXiv:2109.07958"},{"key":"ref76","doi-asserted-by":"publisher","DOI":"10.1145\/3474381"},{"key":"ref77","article-title":"Training verifiers to solve math word problems","author":"Cobbe","year":"2021","journal-title":"arXiv:2110.14168"},{"volume-title":"LLM Leaderboard","year":"2024","key":"ref78"},{"volume-title":"Open LLM Leaderboard","year":"2023","author":"Beeching","key":"ref79"},{"volume-title":"Free Dolly: Introducing the World\u2019s First Truly Open Instruction-Tuned LLM","year":"2023","author":"Conover","key":"ref80"},{"key":"ref81","article-title":"Llama 2: Open foundation and fine-tuned chat models","author":"Touvron","year":"2023","journal-title":"arXiv:2307.09288"},{"key":"ref82","article-title":"Textbooks are all you need II: Phi-1.5 technical report","volume-title":"arXiv:2309.05463","author":"Li","year":"2023"},{"volume-title":"Phi-2","year":"2024","key":"ref83"},{"key":"ref84","article-title":"Mixtral of experts","author":"Jiang","year":"2024","journal-title":"arXiv:2401.04088"},{"volume-title":"Stable Code 3B","year":"2024","author":"Pinnaparaju","key":"ref85"},{"volume-title":"The falcon series of open language models","year":"2023","author":"Almazrouei","key":"ref86"},{"key":"ref87","first-page":"1","article-title":"Qlora: Efficient finetuning of quantized LLMs","volume-title":"Proc. Adv. Neural Inf. Process. 
Syst.","volume":"36","author":"Dettmers"},{"volume-title":"Opentitan AES Mix Single Column Module","year":"2024","key":"ref88"},{"key":"ref89","doi-asserted-by":"publisher","DOI":"10.1109\/TIFS.2024.3374558"},{"key":"ref90","doi-asserted-by":"publisher","DOI":"10.1109\/SP46215.2023.10179420"},{"key":"ref91","doi-asserted-by":"publisher","DOI":"10.1109\/TIFS.2024.3372809"},{"key":"ref92","article-title":"HDLdebugger: Streamlining HDL debugging with large language models","author":"Yao","year":"2024","journal-title":"arXiv:2403.11671"},{"key":"ref93","doi-asserted-by":"publisher","DOI":"10.1109\/ASP-DAC58780.2024.10473904"}],"container-title":["IEEE Transactions on Circuits and Systems I: Regular Papers"],"original-title":[],"link":[{"URL":"https:\/\/ieeexplore.ieee.org\/ielam\/8919\/10857679\/10767852-aam.pdf","content-type":"application\/pdf","content-version":"am","intended-application":"syndication"},{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/8919\/10857679\/10767852.pdf?arnumber=10767852","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,1,30]],"date-time":"2025-01-30T05:48:48Z","timestamp":1738216128000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10767852\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,2]]},"references-count":93,"journal-issue":{"issue":"2"},"URL":"https:\/\/doi.org\/10.1109\/tcsi.2024.3487486","relation":{},"ISSN":["1549-8328","1558-0806"],"issn-type":[{"type":"print","value":"1549-8328"},{"type":"electronic","value":"1558-0806"}],"subject":[],"published":{"date-parts":[[2025,2]]}}}