{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,22]],"date-time":"2026-04-22T18:00:28Z","timestamp":1776880828056,"version":"3.51.2"},"reference-count":25,"publisher":"IEEE","license":[{"start":{"date-parts":[[2024,6,30]],"date-time":"2024-06-30T00:00:00Z","timestamp":1719705600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,6,30]],"date-time":"2024-06-30T00:00:00Z","timestamp":1719705600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2024,6,30]]},"DOI":"10.1109\/ijcnn60899.2024.10650138","type":"proceedings-article","created":{"date-parts":[[2024,9,9]],"date-time":"2024-09-09T17:35:05Z","timestamp":1725903305000},"page":"1-8","source":"Crossref","is-referenced-by-count":6,"title":["ChatLogic: Integrating Logic Programming with Large Language Models for Multi-Step Reasoning"],"prefix":"10.1109","author":[{"given":"Zhongsheng","family":"Wang","sequence":"first","affiliation":[{"name":"University of Auckland,School of Computer Science,Auckland,New Zealand"}]},{"given":"Jiamou","family":"Liu","sequence":"additional","affiliation":[{"name":"University of Auckland,School of Computer Science,Auckland,New Zealand"}]},{"given":"Qiming","family":"Bao","sequence":"additional","affiliation":[{"name":"University of Auckland,School of Computer Science,Auckland,New Zealand"}]},{"given":"Hongfei","family":"Rong","sequence":"additional","affiliation":[{"name":"University of Auckland,School of Computer Science,Auckland,New Zealand"}]},{"given":"Jingfeng","family":"Zhang","sequence":"additional","affiliation":[{"name":"University of Auckland,School of Computer Science,Auckland,New Zealand"}]}],"member":"263","reference":[{"key":"ref1","article-title":"Gpt-4 technical report","year":"2023"},{"key":"ref2","article-title":"Llama 2: Open foundation and fine-tuned chat models","author":"Touvron","year":"2023"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1038\/s41591-023-02448-8"},{"key":"ref4","article-title":"Scaling transformer to 1m tokens and beyond with rmt","author":"Bulatov","year":"2023"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.emnlp-main.382"},{"key":"ref6","first-page":"2206","article-title":"Improving language models by retrieving from trillions of tokens","volume-title":"International Conference on machine learning","author":"Borgeaud"},{"key":"ref7","article-title":"Demonstrate-search-predict: Composing retrieval and language models for knowledge-intensive nlp","author":"Khattab","year":"2022"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.findings-acl.67"},{"key":"ref9","article-title":"Selection-inference: Exploiting large language models for interpretable logical reasoning","author":"Creswell","year":"2022"},{"key":"ref10","article-title":"Self-consistency improves chain of thought reasoning in language models","author":"Wang","year":"2022"},{"key":"ref11","article-title":"Faithful reasoning using large language models","author":"Creswell","year":"2022"},{"key":"ref12","first-page":"24 824","article-title":"Chain-of-thought prompting elicits reasoning in large language models","volume":"35","author":"Wei","year":"2022","journal-title":"Advances in Neural Information Processing Systems"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.findings-acl.321"},{"key":"ref14","article-title":"Teaching large language models to self-debug","author":"Chen","year":"2023"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.findings-emnlp.248"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1145\/3386252"},{"key":"ref17","first-page":"1877","article-title":"Language models are few-shot learners","volume":"33","author":"Brown","year":"2020","journal-title":"Advances in neural information processing systems"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1145\/3411763.3451760"},{"key":"ref19","first-page":"22 199","article-title":"Large language models are zero-shot reasoners","volume":"35","author":"Kojima","year":"2022","journal-title":"Advances in neural information processing systems"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1109\/iccv51070.2023.00280"},{"key":"ref21","article-title":"Solving math word problems by combining language models with symbolic solvers","author":"He-Yueya","year":"2023"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1145\/3491101.3519665"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1147\/sj.352.0151"},{"key":"ref24","article-title":"Chatdb: Augmenting llms with databases as their symbolic memory","author":"Hu","year":"2023"},{"key":"ref25","article-title":"Picolo: A simple python framework for introducing component principles","volume-title":"Euro Python Conference. Citeseer","author":"Marvie"}],"event":{"name":"2024 International Joint Conference on Neural Networks (IJCNN)","location":"Yokohama, Japan","start":{"date-parts":[[2024,6,30]]},"end":{"date-parts":[[2024,7,5]]}},"container-title":["2024 International Joint Conference on Neural Networks (IJCNN)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/10649807\/10649898\/10650138.pdf?arnumber=10650138","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,9,10]],"date-time":"2024-09-10T04:44:53Z","timestamp":1725943493000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10650138\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,6,30]]},"references-count":25,"URL":"https:\/\/doi.org\/10.1109\/ijcnn60899.2024.10650138","relation":{},"subject":[],"published":{"date-parts":[[2024,6,30]]}}}