@inproceedings{8a55e061391f4722ad2366831a1f50cb,
  title     = {Eliciting Instruction-tuned Code Language Models' Capabilities to Utilize Auxiliary Function for Code Generation},
  author    = {Lee, Seonghyeon and Kim, Suyeon and Jang, Joonwon and Chon, Heejae and Lee, Dongha and Yu, Hwanjo},
  editor    = {Al-Onaizan, Yaser and Bansal, Mohit and Chen, Yun-Nung},
  booktitle = {{EMNLP} 2024 - 2024 Conference on Empirical Methods in Natural Language Processing, Findings of {EMNLP} 2024},
  series    = {{EMNLP} 2024 - 2024 Conference on Empirical Methods in Natural Language Processing, Findings of {EMNLP} 2024},
  publisher = {Association for Computational Linguistics (ACL)},
  address   = {United States},
  pages     = {1840--1846},
  year      = {2024},
  doi       = {10.18653/v1/2024.findings-emnlp.100},
  language  = {English},
  abstract  = {We study the code generation behavior of instruction-tuned models built on top of code pre-trained language models when they could access an auxiliary function to implement a function. We design several ways to provide auxiliary functions to the models by adding them to the query or providing a response prefix to incorporate the ability to utilize auxiliary functions with the instruction-following capability. Our experimental results show the effectiveness of combining the base models' auxiliary function utilization ability with the instruction following ability. In particular, the performance of adopting our approaches with the open-sourced language models surpasses that of the recent powerful proprietary language models, i.e., gpt-4o.},
  note      = {Publisher Copyright: {\textcopyright} 2024 Association for Computational Linguistics.; 2024 Findings of the Association for Computational Linguistics, EMNLP 2024 ; Conference date: 12-11-2024 Through 16-11-2024},
}