Commit 091eb36

More robust tool calling prompt (ggml-org#1455)
* More robust tool checking prompt
* Inform UI we want a tool
1 parent 77bd24a commit 091eb36

1 file changed: koboldcpp.py (9 additions, 6 deletions)
```diff
@@ -2018,27 +2018,29 @@ def transform_genparams(genparams, api_format):
             #if auto mode, determine whether a tool is needed
             tools_string = json.dumps(tools_array, indent=0)
             should_use_tools = True
+            user_start = adapter_obj.get("user_start", "### Instruction:\n\n")
+            user_end = adapter_obj.get("user_end", "\n\n### Response:\n\n")
             if chosen_tool=="auto":
                 temp_poll = {
-                "prompt": f"{messages_string}\n\nAvailable Tools:\n{tools_string}\n\nBased on the above, answer in one word only (yes or no): Should a tool be used?\n\nAnswer:\n",
+                "prompt": f"{user_start}User query:\n\n{messages_string}\n\nTool Code:\n{tools_string}Determine from the provided tool code if the user query would be best answered by a listed tool (One word: yes / no):{user_end}",
                 "max_length":4,
-                "temperature":0.2,
-                "top_k":10,
+                "temperature":0.1,
+                "top_k":1,
                 "rep_pen":1,
                 "ban_eos_token":False
                 }
                 temp_poll_result = generate(genparams=temp_poll)
-                if temp_poll_result and "no" in temp_poll_result['text'].lower():
+                if temp_poll_result and not "yes" in temp_poll_result['text'].lower():
                     should_use_tools = False
                 if not args.quiet:
-                    print(f"\nDeciding if we should use a tool: {temp_poll_result['text']} ({should_use_tools})")
+                    print(f"\nRelevant tool is listed: {temp_poll_result['text']} ({should_use_tools})")

             if should_use_tools:
                 messages_string += tools_string
                 messages_string += tool_json_formatting_instruction

                 # Set temperature low automatically if function calling
-                genparams["temperature"] = 0.2
+                genparams["temperature"] = 0.1
                 genparams["using_openai_tools"] = True

                 # Set grammar to llamacpp example grammar to force json response (see https://github.com/ggerganov/llama.cpp/blob/master/grammars/json_arr.gbnf)
```
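For readers skimming the diff, here is a self-contained sketch of the auto-mode decision poll this hunk implements. The helper name `poll_should_use_tools` is hypothetical, and it assumes a `generate()` callable that returns a dict with a `'text'` field, as in koboldcpp.py; the prompt and sampling settings mirror the hunk above.

```python
import json

def poll_should_use_tools(generate, messages_string, tools_array, adapter_obj, quiet=True):
    """Ask the model a one-word yes/no question: is any listed tool
    relevant to the user query? Sketch of the hunk above; `generate`
    is assumed to return a dict with a 'text' field."""
    tools_string = json.dumps(tools_array, indent=0)
    # Wrap the poll in the instruct template so the model sees a normal turn.
    user_start = adapter_obj.get("user_start", "### Instruction:\n\n")
    user_end = adapter_obj.get("user_end", "\n\n### Response:\n\n")
    temp_poll = {
        "prompt": f"{user_start}User query:\n\n{messages_string}\n\nTool Code:\n{tools_string}"
                  f"Determine from the provided tool code if the user query would be best "
                  f"answered by a listed tool (One word: yes / no):{user_end}",
        "max_length": 4,      # just enough room for a one-word answer
        "temperature": 0.1,   # near-deterministic sampling
        "top_k": 1,           # greedy decoding: always take the most likely token
        "rep_pen": 1,
        "ban_eos_token": False,
    }
    should_use_tools = True   # default to using tools if the poll yields nothing
    result = generate(genparams=temp_poll)
    # Anything other than an explicit "yes" opts out -- stricter than the
    # old check, which only opted out on an explicit "no".
    if result and "yes" not in result["text"].lower():
        should_use_tools = False
    if not quiet:
        answer = result["text"] if result else "?"
        print(f"\nRelevant tool is listed: {answer} ({should_use_tools})")
    return should_use_tools
```

The switch from `top_k:10, temperature:0.2` to `top_k:1, temperature:0.1` makes the poll effectively greedy, so the yes/no classification is reproducible across runs.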
```diff
@@ -2265,6 +2267,7 @@ def run_blocking(): # api format 1=basic,2=kai,3=oai,4=oai-chat
                 tool_calls = extract_json_from_string(recvtxt)
                 if tool_calls and len(tool_calls)>0:
                     recvtxt = None
+                    currfinishreason = "tool_calls"
                 res = {"id": "chatcmpl-A1", "object": "chat.completion", "created": int(time.time()), "model": friendlymodelname,
                     "usage": {"prompt_tokens": prompttokens, "completion_tokens": comptokens, "total_tokens": (prompttokens+comptokens)},
                     "choices": [{"index": 0, "message": {"role": "assistant", "content": recvtxt, "tool_calls": tool_calls}, "finish_reason": currfinishreason, "logprobs":logprobsdict}]}
```
