{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,5,6]],"date-time":"2026-05-06T15:28:41Z","timestamp":1778081321384,"version":"3.51.4"},"publisher-location":"New York, NY, USA","reference-count":27,"publisher":"ACM","license":[{"start":{"date-parts":[[2021,5,8]],"date-time":"2021-05-08T00:00:00Z","timestamp":1620432000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2021,5,8]]},"DOI":"10.1145\/3411763.3451760","type":"proceedings-article","created":{"date-parts":[[2021,5,8]],"date-time":"2021-05-08T00:36:14Z","timestamp":1620434174000},"page":"1-7","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":622,"title":["Prompt Programming for Large Language Models: Beyond the Few-Shot Paradigm"],"prefix":"10.1145","author":[{"given":"Laria","family":"Reynolds","sequence":"first","affiliation":[{"name":"University of Michigan, United States"}]},{"given":"Kyle","family":"McDonell","sequence":"additional","affiliation":[{"name":"KNC Neural Calculus, United States"}]}],"member":"320","published-online":{"date-parts":[[2021,5,8]]},"reference":[{"key":"e_1_3_2_1_1_1","doi-asserted-by":"publisher","DOI":"10.3115\/v1\/W14-3302"},{"key":"e_1_3_2_1_2_1","unstructured":"Gwern Branwen. 2020. GPT-3 Creative Fiction. (2020)."},{"key":"e_1_3_2_1_3_1","unstructured":"Tom\u00a0B Brown Benjamin Mann Nick Ryder Melanie Subbiah Jared Kaplan Prafulla Dhariwal Arvind Neelakantan Pranav Shyam Girish Sastry Amanda Askell 2020. Language models are few-shot learners. arXiv preprint arXiv:2005.14165(2020)."},{"key":"e_1_3_2_1_4_1","unstructured":"Marc-Alexandre C\u00f4t\u00e9 \u00c1kos K\u00e1d\u00e1r Xingdi Yuan Ben Kybartas Tavian Barnes Emery Fine James Moore Ruo\u00a0Yu Tao Matthew Hausknecht Layla\u00a0El Asri Mahmoud Adada Wendy Tay and Adam Trischler. 2019. TextWorld: A Learning Environment for Text-based Games. (2019). arxiv:1806.11532\u00a0[cs.LG]"},{"key":"e_1_3_2_1_5_1","volume-title":"Bert: Pre-training of deep bidirectional transformers for language understanding. arXiv preprint arXiv:1810.04805(2018).","author":"Devlin Jacob","year":"2018","unstructured":"Jacob Devlin, Ming-Wei Chang, Kenton Lee, and Kristina Toutanova. 2018. Bert: Pre-training of deep bidirectional transformers for language understanding. arXiv preprint arXiv:1810.04805(2018)."},{"key":"e_1_3_2_1_6_1","unstructured":"Angela Fan Mike Lewis and Yann Dauphin. 2018. Hierarchical Neural Story Generation. arxiv:1805.04833\u00a0[cs.CL]"},{"key":"e_1_3_2_1_7_1","doi-asserted-by":"crossref","unstructured":"Zhe Gan Yu Cheng Ahmed\u00a0El Kholy Linjie Li Jingjing Liu and Jianfeng Gao. 2019. Multi-step Reasoning via Recurrent Dual Attention for Visual Dialog. https:\/\/arxiv.org\/abs\/1902.00579","DOI":"10.18653\/v1\/P19-1648"},{"key":"e_1_3_2_1_8_1","volume-title":"Building AGI Using Language Models. leogao.dev","author":"Gao Leo","year":"2020","unstructured":"Leo Gao. 2020. Building AGI Using Language Models. leogao.dev (2020). https:\/\/bit.ly\/3rViLGk"},{"key":"e_1_3_2_1_9_1","unstructured":"Tianyu Gao Adam Fisch and Danqi Chen. 2020. Making Pre-trained Language Models Better Few-shot Learners. arxiv:2012.15723\u00a0[cs.CL]"},{"key":"e_1_3_2_1_10_1","unstructured":"Dan Hendrycks Collin Burns Steven Basart Andy Zou Mantas Mazeika Dawn Song and Jacob Steinhardt. 2020. Measuring massive multitask language understanding. (2020). https:\/\/arxiv.org\/abs\/2009.03300"},{"key":"e_1_3_2_1_11_1","unstructured":"Ari Holtzman Jan Buys Li Du Maxwell Forbes and Yejin Choi. 2020. The Curious Case of Neural Text Degeneration. arxiv:1904.09751\u00a0[cs.CL]"},{"key":"e_1_3_2_1_12_1","doi-asserted-by":"crossref","unstructured":"Jeremy Howard and Sebastian Ruder. 2018. Universal language model fine-tuning for text classification. (2018). https:\/\/arxiv.org\/abs\/1801.06146","DOI":"10.18653\/v1\/P18-1031"},{"key":"e_1_3_2_1_13_1","unstructured":"KaryoKleptid. 2020. Seems to work. https:\/\/bit.ly\/37dA1hY"},{"key":"e_1_3_2_1_14_1","unstructured":"KaryoKleptid. 2020. Teaching GPT-3 to do a brute force \u2019for loop\u2019 checking answers. https:\/\/bit.ly\/2N7khX1"},{"key":"e_1_3_2_1_15_1","volume-title":"CTRL: A Conditional Transformer Language Model for Controllable Generation. CoRR abs\/1909.05858(2019)","author":"Keskar Nitish\u00a0Shirish","year":"2019","unstructured":"Nitish\u00a0Shirish Keskar, Bryan McCann, Lav\u00a0R. Varshney, Caiming Xiong, and Richard Socher. 2019. CTRL: A Conditional Transformer Language Model for Controllable Generation. CoRR abs\/1909.05858(2019). http:\/\/arxiv.org\/abs\/1909.05858"},{"key":"e_1_3_2_1_16_1","doi-asserted-by":"crossref","unstructured":"Ben Krause Akhilesh\u00a0Deepak Gotmare Bryan McCann Nitish\u00a0Shirish Keskar Shafiq Joty Richard Socher and Nazneen\u00a0Fatema Rajani. 2020. GeDi: Generative Discriminator Guided Sequence Generation. arXiv preprint arXiv:2009.06367(2020).","DOI":"10.18653\/v1\/2021.findings-emnlp.424"},{"key":"e_1_3_2_1_17_1","unstructured":"Xiang\u00a0Lisa Li and Percy Liang. 2021. Prefix-Tuning: Optimizing Continuous Prompts for Generation. arXiv preprint arXiv:2101.00190(2021)."},{"key":"e_1_3_2_1_18_1","unstructured":"Jiangming Liu and Matt Gardner. 2020. Multi-Step Inference for Reasoning Over Paragraphs. arXiv preprint arXiv:2004.02995(2020)."},{"key":"e_1_3_2_1_19_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/W18-6319"},{"key":"e_1_3_2_1_20_1","unstructured":"Zachary Robertson. 2020. You Can Probably Amplify GPT3 Directly. https:\/\/bit.ly\/3tXT7Cw"},{"key":"e_1_3_2_1_21_1","unstructured":"Arram Sabeti. 2020. GPT-3: Using Fiction to Demonstrate How Prompts Impact Output Quality. https:\/\/bit.ly\/3jP3TWW"},{"key":"e_1_3_2_1_22_1","volume-title":"Eric Wallace, and Sameer Singh.","author":"Shin Taylor","year":"2020","unstructured":"Taylor Shin, Yasaman Razeghi, Robert L. Logan\u00a0IV au2, Eric Wallace, and Sameer Singh. 2020. AutoPrompt: Eliciting Knowledge from Language Models with Automatically Generated Prompts. arxiv:2010.15980\u00a0[cs.CL]"},{"key":"e_1_3_2_1_23_1","unstructured":"Latitude Team. 2020. World Creation by Analogy. https:\/\/bit.ly\/2N4vXK0"},{"key":"e_1_3_2_1_24_1","unstructured":"Lilian Wang. 2021. Controllable Neural Text Generation. (2021). https:\/\/bit.ly\/3pl2eKa"},{"key":"e_1_3_2_1_25_1","unstructured":"Qinyuan Ye and Xiang Ren. 2021. Zero-shot Learning by Generating Task-specific Adapters. arxiv:2101.00420\u00a0[cs.CL]"},{"key":"e_1_3_2_1_26_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.acl-main.601"},{"key":"e_1_3_2_1_27_1","volume-title":"lesswrong.com","author":"Yudkowsky Eliezer","year":"2007","unstructured":"Eliezer Yudkowsky. 2007. Rationalization. lesswrong.com (2007). https:\/\/bit.ly\/3pmYt6I"}],"event":{"name":"CHI '21: CHI Conference on Human Factors in Computing Systems","location":"Yokohama Japan","acronym":"CHI '21","sponsor":["SIGCHI ACM Special Interest Group on Computer-Human Interaction"]},"container-title":["Extended Abstracts of the 2021 CHI Conference on Human Factors in Computing Systems"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3411763.3451760","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3411763.3451760","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,17]],"date-time":"2025-06-17T21:28:21Z","timestamp":1750195701000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3411763.3451760"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2021,5,8]]},"references-count":27,"alternative-id":["10.1145\/3411763.3451760","10.1145\/3411763"],"URL":"https:\/\/doi.org\/10.1145\/3411763.3451760","relation":{},"subject":[],"published":{"date-parts":[[2021,5,8]]},"assertion":[{"value":"2021-05-08","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}