{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,10,14]],"date-time":"2025-10-14T00:59:00Z","timestamp":1760403540749,"version":"build-2065373602"},"reference-count":26,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","license":[{"start":{"date-parts":[[2025,1,1]],"date-time":"2025-01-01T00:00:00Z","timestamp":1735689600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by-nc-nd\/4.0\/"}],"funder":[{"name":"clinical medical research project of Wuhan Children\u2019s Hospital","award":["2022FE020"],"award-info":[{"award-number":["2022FE020"]}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["72401287"],"award-info":[{"award-number":["72401287"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Access"],"published-print":{"date-parts":[[2025]]},"DOI":"10.1109\/access.2025.3612034","type":"journal-article","created":{"date-parts":[[2025,9,19]],"date-time":"2025-09-19T17:38:25Z","timestamp":1758303505000},"page":"173510-173519","source":"Crossref","is-referenced-by-count":0,"title":["SegAdapt: Bridging Domain Gaps in Pre-Trained Models via Structured Injection and Mask Optimization"],"prefix":"10.1109","volume":"13","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-9217-8759","authenticated-orcid":false,"given":"W.","family":"Bo","sequence":"first","affiliation":[{"name":"National University of Defense Technology, Changsha, China"}]},{"given":"J.","family":"Xuping","sequence":"additional","affiliation":[{"name":"Information Support Force Engineering University, Wuhan, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-3212-8925","authenticated-orcid":false,"given":"Y.","family":"Yaqian","sequence":"additional","affiliation":[{"name":"Information Support Force Engineering University, Wuhan, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-0780-0812","authenticated-orcid":false,"given":"M.","family":"Xiaofeng","sequence":"additional","affiliation":[{"name":"Wuhan Children\u2019s Hospital (Wuhan Maternal and Child Healthcare Hospital), Tongji Medical College, Huazhong University of Science and Technology, Wuhan, China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2025.findings-naacl.239"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.findings-emnlp.329"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1145\/3458754"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.3390\/app13052789"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1016\/j.neucom.2023.126488"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-031-44693-1_26"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.3389\/fpls.2022.1053449"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.findings-acl.82"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.acl-main.740"},{"key":"ref10","article-title":"LightNER: A lightweight tuning paradigm for low-resource NER via pluggable prompting","author":"Chen","year":"2021","journal-title":"arXiv:2109.00720"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1145\/3497842"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.1109\/taslp.2021.3124365"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1109\/taslp.2022.3153261"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1109\/tits.2022.3182371"},{"key":"ref15","article-title":"Continual pre-training of large language models: How to (re)warm your model?","author":"Gupta","year":"2023","journal-title":"arXiv:2308.04014"},{"key":"ref16","article-title":"Continual pre-training of language models","author":"Ke","year":"2023","journal-title":"arXiv:2302.03241"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1016\/j.neunet.2024.106492"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1145\/3580305.3599891"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.wassa-1.22"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.acl-long.408"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1145\/3477495.3531886"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2024.findings-naacl.68"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.findings-emnlp.881"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/p19-1139"},{"key":"ref25","article-title":"ALBERT: A lite BERT for self-supervised learning of language representations","author":"Lan","year":"2019","journal-title":"arXiv:1909.11942"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.48550\/ARXIV.1907.11692"}],"container-title":["IEEE Access"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/6287639\/10820123\/11173673.pdf?arnumber=11173673","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,10,13]],"date-time":"2025-10-13T17:41:55Z","timestamp":1760377315000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11173673\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025]]},"references-count":26,"URL":"https:\/\/doi.org\/10.1109\/access.2025.3612034","relation":{},"ISSN":["2169-3536"],"issn-type":[{"type":"electronic","value":"2169-3536"}],"subject":[],"published":{"date-parts":[[2025]]}}}