diff --git a/src/praisonai-agents/praisonaiagents/llm/llm.py b/src/praisonai-agents/praisonaiagents/llm/llm.py index 4346cf7ef..a49bd53c6 100644 --- a/src/praisonai-agents/praisonaiagents/llm/llm.py +++ b/src/praisonai-agents/praisonaiagents/llm/llm.py @@ -722,6 +722,33 @@ def _build_messages(self, prompt, system_prompt=None, chat_history=None, output_ if schema_model and hasattr(schema_model, 'model_json_schema'): system_prompt += f"\nReturn ONLY a JSON object that matches this Pydantic model: {json.dumps(schema_model.model_json_schema())}" + # For XML format models, add tool descriptions to system prompt + if tools and self._supports_xml_tool_format(): + system_prompt += "\n\nYou have access to the following tools:" + for tool in tools: + if isinstance(tool, dict) and 'function' in tool: + func = tool['function'] + name = func.get('name', 'unknown') + description = func.get('description', 'No description available') + + # Add parameter information if available + params = func.get('parameters', {}).get('properties', {}) + required = func.get('parameters', {}).get('required', []) + + param_info = "" + if params: + param_list = [] + for param_name, param_details in params.items(): + param_type = param_details.get('type', 'any') + is_required = param_name in required + param_list.append(f"{param_name} ({param_type}){'*' if is_required else ''}") + param_info = f" - Parameters: {', '.join(param_list)}" + + system_prompt += f"\n- {name}: {description}{param_info}" + + system_prompt += "\n\nWhen you need to use a tool, wrap your tool call in XML tags like this:" + system_prompt += "\n<tool_call>\n{\"name\": \"tool_name\", \"arguments\": {\"param\": \"value\"}}\n</tool_call>" + # Skip system messages for legacy o1 models as they don't support them if not self._needs_system_message_skip(): messages.append({"role": "system", "content": system_prompt}) @@ -3155,19 +3182,28 @@ def _build_completion_params(self, **override_params) -> Dict[str, Any]: logging.debug(f"Using Gemini native structured 
output with schema: {json.dumps(schema, indent=2)}") - # Add tool_choice="auto" when tools are provided (unless already specified) - if 'tools' in params and params['tools'] and 'tool_choice' not in params: - # For Gemini models, use tool_choice to encourage tool usage - if self._is_gemini_model(): - try: - import litellm - # Check if model supports function calling before setting tool_choice - if litellm.supports_function_calling(model=self.model): - params['tool_choice'] = 'auto' - except Exception as e: - # If check fails, still set tool_choice for known Gemini models - logging.debug(f"Could not verify function calling support: {e}. Setting tool_choice anyway.") - params['tool_choice'] = 'auto' + # Handle XML format models (like Qwen) differently for tool calls + if 'tools' in params and params['tools']: + if self._supports_xml_tool_format(): + # For XML format models, remove tools parameter to avoid OpenRouter routing issues + # Tools will be described in the system prompt instead + logging.debug("Removing tools parameter for XML format model to avoid provider routing issues") + params.pop('tools', None) + params.pop('tool_choice', None) + else: + # Add tool_choice="auto" when tools are provided (unless already specified) + if 'tool_choice' not in params: + # For Gemini models, use tool_choice to encourage tool usage + if self._is_gemini_model(): + try: + import litellm + # Check if model supports function calling before setting tool_choice + if litellm.supports_function_calling(model=self.model): + params['tool_choice'] = 'auto' + except Exception as e: + # If check fails, still set tool_choice for known Gemini models + logging.debug(f"Could not verify function calling support: {e}. 
Setting tool_choice anyway.") + params['tool_choice'] = 'auto' return params diff --git a/test_openrouter_xml_fix.py b/test_openrouter_xml_fix.py new file mode 100644 index 000000000..485f4b3a8 --- /dev/null +++ b/test_openrouter_xml_fix.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python3 +"""Test script for OpenRouter XML tool call fix""" + +import os +import sys + +# Add the source directory to the path +sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'src', 'praisonai-agents')) + +from praisonaiagents import Agent + +def get_weather(city: str) -> str: + """Get weather information for a city""" + return f"The weather in {city} is sunny with 22°C" + +def main(): + print("Testing OpenRouter XML tool call fix...") + + # Test with auto-detection (should detect Qwen as XML format) + agent = Agent( + instructions="You are a helpful assistant", + llm="openrouter/qwen/qwen-2.5-7b-instruct", + tools=[get_weather], + verbose=True + ) + + print("Created agent with Qwen model...") + + # Get the LLM instance directly from the agent + llm_instance = agent.llm_instance # This should be the LLM object + print(f"XML tool format supported: {llm_instance._supports_xml_tool_format()}") + + # Test the tool call without actually making API request + # We'll just verify the parameters are built correctly + test_tools = [ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get weather information for a city", + "parameters": { + "type": "object", + "properties": { + "city": { + "type": "string", + "description": "The city name" + } + }, + "required": ["city"] + } + } + } + ] + + # Test _build_completion_params + params = llm_instance._build_completion_params( + messages=[{"role": "user", "content": "What's the weather in Tokyo?"}], + tools=test_tools, + temperature=0.2 + ) + + print("\n=== Completion Parameters ===") + print(f"Model: {params.get('model')}") + print(f"Tools included: {'tools' in params}") + print(f"Tool choice included: {'tool_choice' 
in params}") + + # Test _build_messages + messages, original = llm_instance._build_messages( + prompt="What's the weather in Tokyo?", + system_prompt="You are a helpful assistant", + tools=test_tools + ) + + print("\n=== System Message ===") + for msg in messages: + if msg['role'] == 'system': + print(msg['content']) + break + + print("\n✅ Test completed successfully!") + print("Key improvements:") + print("- Tools parameter is removed for XML format models") + print("- Tool descriptions are added to system prompt") + print("- XML tool call format instructions are included") + +if __name__ == "__main__": + main() \ No newline at end of file