{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,3,26]],"date-time":"2025-03-26T12:58:48Z","timestamp":1742993928629,"version":"3.40.3"},"publisher-location":"Cham","reference-count":33,"publisher":"Springer Nature Switzerland","isbn-type":[{"type":"print","value":"9783031624940"},{"type":"electronic","value":"9783031624957"}],"license":[{"start":{"date-parts":[[2024,1,1]],"date-time":"2024-01-01T00:00:00Z","timestamp":1704067200000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2024,1,1]],"date-time":"2024-01-01T00:00:00Z","timestamp":1704067200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2024]]},"DOI":"10.1007\/978-3-031-62495-7_7","type":"book-chapter","created":{"date-parts":[[2024,6,21]],"date-time":"2024-06-21T20:19:24Z","timestamp":1719001164000},"page":"82-92","update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":0,"title":["Comparative Analysis of Large Language Models in Structured Information Extraction from Job Postings"],"prefix":"10.1007","author":[{"given":"Kyriaki","family":"Sioziou","sequence":"first","affiliation":[]},{"given":"Panagiotis","family":"Zervas","sequence":"additional","affiliation":[]},{"given":"Kostas","family":"Giotopoulos","sequence":"additional","affiliation":[]},{"given":"Giannis","family":"Tzimas","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2024,6,22]]},"reference":[{"doi-asserted-by":"publisher","unstructured":"Vaswani, A., et al.: Attention is all you need. In: Advances in Neural Information Processing Systems 30: Annual Conference on Neural Information Processing Systems 2017, Long Beach, CA, USA, 4\u20139 December 2017, pp. 5998\u20136008 (2017). https:\/\/doi.org\/10.48550\/arXiv.1706.03762","key":"7_CR1","DOI":"10.48550\/arXiv.1706.03762"},{"key":"7_CR2","doi-asserted-by":"publisher","first-page":"84559","DOI":"10.1109\/ACCESS.2021.3087913","volume":"9","author":"D Vukadin","year":"2021","unstructured":"Vukadin, D., Kurdija, A.S., Dela\u010d, G., \u0160ili\u0107, M.: Information extraction from free-form CV documents in multiple languages. IEEE Access 9, 84559\u201384575 (2021). https:\/\/doi.org\/10.1109\/ACCESS.2021.3087913","journal-title":"IEEE Access"},{"doi-asserted-by":"publisher","unstructured":"Wei, J., et al.: Emergent abilities of large language models (2022). https:\/\/doi.org\/10.48550\/arXiv.2206.07682","key":"7_CR3","DOI":"10.48550\/arXiv.2206.07682"},{"doi-asserted-by":"publisher","unstructured":"Brown, T., et al.: Language models are few-shot learners. In: Advances in NeurIPS, vol. 33, pp. 1877\u20131901. Curran Associates, Inc., (2020). https:\/\/doi.org\/10.48550\/arXiv.2005.14165","key":"7_CR4","DOI":"10.48550\/arXiv.2005.14165"},{"doi-asserted-by":"publisher","unstructured":"Ye, J., et al.: A comprehensive capability analysis of GPT-3 and GPT-3.5 series models (2023). https:\/\/doi.org\/10.48550\/arXiv.2303.10420","key":"7_CR5","DOI":"10.48550\/arXiv.2303.10420"},{"unstructured":"OpenAI developer platform. https:\/\/platform.openai.com\/docs\/models\/gpt-3-5","key":"7_CR6"},{"doi-asserted-by":"publisher","unstructured":"Touvron, H., et al.: Llama 2: open foundation and fine-tuned chat models (2023). https:\/\/doi.org\/10.48550\/arXiv.2307.09288","key":"7_CR7","DOI":"10.48550\/arXiv.2307.09288"},{"doi-asserted-by":"publisher","unstructured":"Jiang, A.Q., et al.: Mistral 7B (2023). https:\/\/doi.org\/10.48550\/arXiv.2310.06825","key":"7_CR8","DOI":"10.48550\/arXiv.2310.06825"},{"doi-asserted-by":"publisher","unstructured":"Ainslie, J., Lee-Thorp, J., de Jong, M., Zemlyanskiy, Y., Lebr\u00f3n, F., Sanghai, S.: GQA: training generalized multi-query transformer models from multi-head checkpoints (2023). https:\/\/doi.org\/10.48550\/arXiv.2305.13245","key":"7_CR9","DOI":"10.48550\/arXiv.2305.13245"},{"doi-asserted-by":"publisher","unstructured":"Child, R., Gray, S., Radford, A., Sutskever, I.: Generating long sequences with sparse transformers (2019). https:\/\/doi.org\/10.48550\/arXiv.1904.10509","key":"7_CR10","DOI":"10.48550\/arXiv.1904.10509"},{"doi-asserted-by":"publisher","unstructured":"Beltagy, I., Peters, M.E., Cohan, A.: Longformer: the long-document transformer (2020). https:\/\/doi.org\/10.48550\/arXiv.2004.05150","key":"7_CR11","DOI":"10.48550\/arXiv.2004.05150"},{"unstructured":"pankajmathur\/orca_mini_3b \u2013 Hugging Face. https:\/\/huggingface.co\/pankajmathur\/orca_mini_3b","key":"7_CR12"},{"doi-asserted-by":"publisher","unstructured":"Mukherjee, S., Mitra, A., Jawahar, G., Agarwal, S., Palangi, H., Awadallah, A.: Orca: progressive learning from complex explanation traces of GPT-4 (2023). https:\/\/doi.org\/10.48550\/arXiv.2306.02707","key":"7_CR13","DOI":"10.48550\/arXiv.2306.02707"},{"unstructured":"Mohammed. Types of open source & closed source LLMs (Large language Models). Medium (2023). https:\/\/medium.com\/@techlatest.net\/types-of-open-source-llms-large-language-models-3b7d8b8d1af2","key":"7_CR14"},{"doi-asserted-by":"publisher","unstructured":"Yu, H., et al.: Open, Closed, or Small Language Models for Text Classification? (2023). https:\/\/doi.org\/10.48550\/arXiv.2308.10092","key":"7_CR15","DOI":"10.48550\/arXiv.2308.10092"},{"doi-asserted-by":"publisher","unstructured":"Devlin, J., et al.: BERT: pre-training of deep bidirectional transformers for language understanding (2018). https:\/\/doi.org\/10.48550\/arXiv.1810.04805","key":"7_CR16","DOI":"10.48550\/arXiv.1810.04805"},{"doi-asserted-by":"publisher","unstructured":"Singh, S.: Natural language processing for information extraction (2018). https:\/\/doi.org\/10.48550\/arXiv.1807.02383","key":"7_CR17","DOI":"10.48550\/arXiv.1807.02383"},{"doi-asserted-by":"publisher","unstructured":"Li, N., Kang, B., De Bie, T.: LLM4Jobs: unsupervised occupation extraction and standardization leveraging Large Language Models (2023). https:\/\/doi.org\/10.48550\/arXiv.2309.09708","key":"7_CR18","DOI":"10.48550\/arXiv.2309.09708"},{"doi-asserted-by":"publisher","unstructured":"Jeong, C.: Fine-tuning and Utilization Methods of Domain-specific LLMs (2024). https:\/\/doi.org\/10.48550\/arXiv.2401.02981","key":"7_CR19","DOI":"10.48550\/arXiv.2401.02981"},{"doi-asserted-by":"publisher","unstructured":"Wei, J., et al.: Finetuned Language Models are Zero-Shot Learners (2021). https:\/\/doi.org\/10.48550\/arXiv.2109.01652","key":"7_CR20","DOI":"10.48550\/arXiv.2109.01652"},{"doi-asserted-by":"publisher","unstructured":"Ghosh, P., Sadaphal, V.: JobRecoGPT\u2013explainable job recommendations using LLMs (2023). https:\/\/doi.org\/10.48550\/arXiv.2309.11805","key":"7_CR21","DOI":"10.48550\/arXiv.2309.11805"},{"unstructured":"GPT4All. https:\/\/gpt4all.io","key":"7_CR22"},{"unstructured":"Skondras, P.: Panagiotis-Skondras\/Informatics: MDPI Information Paper. GitHub. https:\/\/github.com\/Panagiotis-Skondras\/informatics","key":"7_CR23"},{"doi-asserted-by":"publisher","unstructured":"LeCun, Y., Bengio, Y., Hinton, G.: Deep learning. Nature 521(7553), 436\u2013444 (2015). https:\/\/doi.org\/10.1038\/nature14539","key":"7_CR24","DOI":"10.1038\/nature14539"},{"unstructured":"Radford, A., Narasimhan, K., Salimans, T., Sutskever, I.: Improving language understanding by generative pre-training (2018). https:\/\/openai.com\/research\/language-unsupervised","key":"7_CR25"},{"issue":"1","key":"7_CR26","doi-asserted-by":"publisher","first-page":"24","DOI":"10.1038\/s41591-018-0316-z","volume":"25","author":"A Esteva","year":"2019","unstructured":"Esteva, A., et al.: A guide to deep learning in healthcare. Nat. Med. 25(1), 24\u201329 (2019). https:\/\/doi.org\/10.1038\/s41591-018-0316-z","journal-title":"Nat. Med."},{"doi-asserted-by":"publisher","unstructured":"Hu, Z., Liu, W., Bian, J., Liu, X., Liu, T.Y.: Listening to chaotic whispers: a deep learning framework for news-oriented stock trend prediction. In: Proceedings of the Eleventh ACM International Conference on Web Search and Data Mining, pp. 261\u2013269 (2018). https:\/\/doi.org\/10.48550\/arXiv.1712.02136","key":"7_CR27","DOI":"10.48550\/arXiv.1712.02136"},{"doi-asserted-by":"publisher","unstructured":"Bender, E.M., Gebru, T., McMillan-Major, A., Shmitchell, S.: On the dangers of stochastic parrots: can language models be too big? In: Proceedings of the 2021 ACM Conference on Fairness, Accountability, and Transparency, pp. 610\u2013623 (2021). https:\/\/doi.org\/10.1145\/3442188.3445922","key":"7_CR28","DOI":"10.1145\/3442188.3445922"},{"doi-asserted-by":"publisher","unstructured":"Howard, J., Ruder, S.: Universal language model fine-tuning for text classification (2018). https:\/\/doi.org\/10.48550\/arXiv.1801.06146","key":"7_CR29","DOI":"10.48550\/arXiv.1801.06146"},{"doi-asserted-by":"publisher","unstructured":"Rajkomar, A., Oren, E., Chen, K., et al.: Scalable and accurate deep learning with electronic health records. NPJ Digi. Med. 1, 18 (2018). https:\/\/doi.org\/10.1038\/s41746-018-0029-1","key":"7_CR30","DOI":"10.1038\/s41746-018-0029-1"},{"doi-asserted-by":"publisher","unstructured":"Kalantzi, O., Tsiotas, D., Polyzos, S.: The contribution of tourism in national economies: evidence of Greece (2023). https:\/\/doi.org\/10.48550\/arXiv.2302.13121","key":"7_CR31","DOI":"10.48550\/arXiv.2302.13121"},{"doi-asserted-by":"publisher","unstructured":"Eloundou, T., et al.: GPTs are GPTs: an early look at the labor market impact potential of large language models (2023). https:\/\/doi.org\/10.48550\/arXiv.2303.10130","key":"7_CR32","DOI":"10.48550\/arXiv.2303.10130"},{"unstructured":"Open-LLM-leaderboard (open LLM leaderboard). https:\/\/huggingface.co\/open-llm-leaderboard","key":"7_CR33"}],"container-title":["Communications in Computer and Information Science","Engineering Applications of Neural Networks"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/978-3-031-62495-7_7","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,6,21]],"date-time":"2024-06-21T20:20:36Z","timestamp":1719001236000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/978-3-031-62495-7_7"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024]]},"ISBN":["9783031624940","9783031624957"],"references-count":33,"URL":"https:\/\/doi.org\/10.1007\/978-3-031-62495-7_7","relation":{},"ISSN":["1865-0929","1865-0937"],"issn-type":[{"type":"print","value":"1865-0929"},{"type":"electronic","value":"1865-0937"}],"subject":[],"published":{"date-parts":[[2024]]},"assertion":[{"value":"22 June 2024","order":1,"name":"first_online","label":"First Online","group":{"name":"ChapterHistory","label":"Chapter History"}},{"value":"EANN","order":1,"name":"conference_acronym","label":"Conference Acronym","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"International Conference on Engineering Applications of Neural Networks","order":2,"name":"conference_name","label":"Conference Name","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Corfu","order":3,"name":"conference_city","label":"Conference City","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Greece","order":4,"name":"conference_country","label":"Conference Country","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"2024","order":5,"name":"conference_year","label":"Conference Year","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"27 June 2024","order":7,"name":"conference_start_date","label":"Conference Start Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"30 June 2024","order":8,"name":"conference_end_date","label":"Conference End Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"25","order":9,"name":"conference_number","label":"Conference Number","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"eann2024","order":10,"name":"conference_id","label":"Conference ID","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"https:\/\/eannconf.org\/2024\/","order":11,"name":"conference_url","label":"Conference URL","group":{"name":"ConferenceInfo","label":"Conference Information"}}]}}