From 03890d7106f438f32d14b0578da354eaa6c3c626 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Muhammet=20Eren=20Karaku=C5=9F?=
Date: Tue, 17 Feb 2026 16:10:17 +0300
Subject: [PATCH] fix(llm): prevent infinite recursion and params overwrite in
 call()/acall()

When a provider permanently rejects the 'stop' parameter, the retry
logic in both call() and acall() recursively calls itself without
checking whether 'stop' was already added to drop_params. This causes a
RecursionError instead of a clean failure.

Additionally, the else branch overwrites self.additional_params with a
new dict containing only additional_drop_params, destroying any
existing parameters (extra_headers, seed, etc.).

Changes:
- Add a recursion guard: check whether 'stop' is already in
  drop_params before retrying; if the retry was already attempted,
  fall through to the existing failure handling instead of recursing
- Preserve existing additional_params by updating the
  additional_drop_params entry in place instead of replacing the
  entire dict
- Apply the identical fix to both the sync call() and the async acall()
- Add 6 tests covering the recursion guard, params preservation, and
  the async paths
---
 lib/crewai/src/crewai/llm.py | 118 +++++++++++++++----------
 lib/crewai/tests/test_llm.py | 165 +++++++++++++++++++++++++++++++++++
 2 files changed, 237 insertions(+), 46 deletions(-)

diff --git a/lib/crewai/src/crewai/llm.py b/lib/crewai/src/crewai/llm.py
index 20a0373cb1..5abd1497bd 100644
--- a/lib/crewai/src/crewai/llm.py
+++ b/lib/crewai/src/crewai/llm.py
@@ -1769,31 +1769,43 @@ def call(
                 ) and "'stop'" in str(e)

                 if unsupported_stop:
-                    if (
-                        "additional_drop_params" in self.additional_params
-                        and isinstance(
-                            self.additional_params["additional_drop_params"], list
-                        )
-                    ):
-                        self.additional_params["additional_drop_params"].append(
-                            "stop"
-                        )
-                    else:
-                        self.additional_params = {
-                            "additional_drop_params": ["stop"]
-                        }
+                    existing_drop_params = self.additional_params.get(
+                        "additional_drop_params", []
+                    )
+                    already_dropping_stop = (
+                        isinstance(existing_drop_params, list)
+                        and "stop" in existing_drop_params
+                    )

-                    logging.info("Retrying LLM call without the unsupported 'stop'")
+                    if not already_dropping_stop:
+                        if (
+                            "additional_drop_params" in self.additional_params
+                            and isinstance(
+                                self.additional_params["additional_drop_params"],
+                                list,
+                            )
+                        ):
+                            self.additional_params[
+                                "additional_drop_params"
+                            ].append("stop")
+                        else:
+                            self.additional_params["additional_drop_params"] = [
+                                "stop"
+                            ]
+
+                        logging.info(
+                            "Retrying LLM call without the unsupported 'stop'"
+                        )

-                    return self.call(
-                        messages,
-                        tools=tools,
-                        callbacks=callbacks,
-                        available_functions=available_functions,
-                        from_task=from_task,
-                        from_agent=from_agent,
-                        response_model=response_model,
-                    )
+                        return self.call(
+                            messages,
+                            tools=tools,
+                            callbacks=callbacks,
+                            available_functions=available_functions,
+                            from_task=from_task,
+                            from_agent=from_agent,
+                            response_model=response_model,
+                        )

                 crewai_event_bus.emit(
                     self,
@@ -1905,31 +1917,45 @@ async def acall(
                 ) and "'stop'" in str(e)

                 if unsupported_stop:
-                    if (
-                        "additional_drop_params" in self.additional_params
-                        and isinstance(
-                            self.additional_params["additional_drop_params"], list
-                        )
-                    ):
-                        self.additional_params["additional_drop_params"].append(
-                            "stop"
-                        )
-                    else:
-                        self.additional_params = {
-                            "additional_drop_params": ["stop"]
-                        }
+                    existing_drop_params = self.additional_params.get(
+                        "additional_drop_params", []
+                    )
+                    already_dropping_stop = (
+                        isinstance(existing_drop_params, list)
+                        and "stop" in existing_drop_params
+                    )

-                    logging.info("Retrying LLM call without the unsupported 'stop'")
+                    if not already_dropping_stop:
+                        if (
+                            "additional_drop_params" in self.additional_params
+                            and isinstance(
+                                self.additional_params[
+                                    "additional_drop_params"
+                                ],
+                                list,
+                            )
+                        ):
+                            self.additional_params[
+                                "additional_drop_params"
+                            ].append("stop")
+                        else:
+                            self.additional_params["additional_drop_params"] = [
+                                "stop"
+                            ]
+
+                        logging.info(
+                            "Retrying LLM call without the unsupported 'stop'"
+                        )

-                    return await self.acall(
-                        messages,
-                        tools=tools,
-                        callbacks=callbacks,
-                        available_functions=available_functions,
-                        from_task=from_task,
-                        from_agent=from_agent,
-                        response_model=response_model,
-                    )
+                        return await self.acall(
+                            messages,
+                            tools=tools,
+                            callbacks=callbacks,
+                            available_functions=available_functions,
+                            from_task=from_task,
+                            from_agent=from_agent,
+                            response_model=response_model,
+                        )

                 crewai_event_bus.emit(
                     self,
diff --git a/lib/crewai/tests/test_llm.py b/lib/crewai/tests/test_llm.py
index 71cb697909..779fd3c3cb 100644
--- a/lib/crewai/tests/test_llm.py
+++ b/lib/crewai/tests/test_llm.py
@@ -1022,3 +1022,168 @@ async def test_usage_info_streaming_with_acall():
     assert llm._token_usage["total_tokens"] > 0

     assert len(result) > 0
+
+
+# ============================================================================
+# Unsupported 'stop' parameter retry guard tests
+# ============================================================================
+
+
+class TestUnsupportedStopRetryGuard:
+    """Tests for the unsupported 'stop' parameter retry logic in call()/acall().
+
+    Verifies that:
+    - A single retry is attempted when the provider rejects the 'stop' param
+    - Infinite recursion is prevented if the error persists after the retry
+    - Existing additional_params are preserved (not overwritten) when adding 'stop'
+    """
+
+    def _make_llm(self, **kwargs):
+        """Create an LLM instance configured for testing."""
+        return LLM(model="gpt-4o-mini", is_litellm=True, **kwargs)
+
+    @patch("crewai.llm.LLM._prepare_completion_params")
+    @patch("crewai.llm.LLM._handle_non_streaming_response")
+    def test_retries_once_then_raises_on_persistent_stop_error(
+        self, mock_handle, mock_prepare
+    ):
+        """If the 'stop' error persists after adding it to drop_params,
+        the call must raise instead of recursing infinitely."""
+        llm = self._make_llm(stop=["stop_token"])
+        mock_prepare.return_value = {"messages": [{"role": "user", "content": "hi"}]}
+        mock_handle.side_effect = Exception(
+            "Unsupported parameter: 'stop' is not supported"
+        )
+
+        with pytest.raises(Exception, match="Unsupported parameter"):
+            llm.call("test")
+
+        # Should have been called exactly twice: initial + one retry
+        assert mock_handle.call_count == 2
+
+    @patch("crewai.llm.LLM._prepare_completion_params")
+    @patch("crewai.llm.LLM._handle_non_streaming_response")
+    def test_preserves_existing_additional_params(
+        self, mock_handle, mock_prepare
+    ):
+        """When adding 'stop' to drop_params, other additional_params must survive."""
+        llm = self._make_llm(
+            stop=["stop_token"],
+            extra_headers={"X-Custom": "value"},
+        )
+        llm.additional_params = {
+            "extra_headers": {"X-Custom": "value"},
+            "seed": 42,
+        }
+        mock_prepare.return_value = {"messages": [{"role": "user", "content": "hi"}]}
+
+        call_count = 0
+
+        def side_effect(*args, **kwargs):
+            nonlocal call_count
+            call_count += 1
+            if call_count == 1:
+                raise Exception("Unsupported parameter: 'stop' is not supported")
+            return "Success"
+
+        mock_handle.side_effect = side_effect
+
+        result = llm.call("test")
+
+        assert result == "Success"
+        # Verify existing params were preserved
+        assert llm.additional_params.get("extra_headers") == {"X-Custom": "value"}
+        assert llm.additional_params.get("seed") == 42
+        assert "stop" in llm.additional_params.get("additional_drop_params", [])
+
+    @patch("crewai.llm.LLM._prepare_completion_params")
+    @patch("crewai.llm.LLM._handle_non_streaming_response")
+    def test_appends_to_existing_drop_params(
+        self, mock_handle, mock_prepare
+    ):
+        """When additional_drop_params already exists, 'stop' should be appended."""
+        llm = self._make_llm(
+            stop=["stop_token"],
+            additional_drop_params=["another_param"],
+        )
+        mock_prepare.return_value = {"messages": [{"role": "user", "content": "hi"}]}
+
+        call_count = 0
+
+        def side_effect(*args, **kwargs):
+            nonlocal call_count
+            call_count += 1
+            if call_count == 1:
+                raise Exception("Unsupported parameter: 'stop' is not supported")
+            return "Success"
+
+        mock_handle.side_effect = side_effect
+
+        result = llm.call("test")
+
+        assert result == "Success"
+        assert llm.additional_params["additional_drop_params"] == [
+            "another_param",
+            "stop",
+        ]
+
+    @patch("crewai.llm.LLM._prepare_completion_params")
+    @patch("crewai.llm.LLM._handle_non_streaming_response")
+    def test_non_stop_exceptions_are_not_retried(
+        self, mock_handle, mock_prepare
+    ):
+        """Exceptions that don't mention 'stop' should propagate immediately."""
+        llm = self._make_llm()
+        mock_prepare.return_value = {"messages": [{"role": "user", "content": "hi"}]}
+        mock_handle.side_effect = Exception("Some other error")
+
+        with pytest.raises(Exception, match="Some other error"):
+            llm.call("test")
+
+        assert mock_handle.call_count == 1
+
+    @pytest.mark.asyncio
+    @patch("crewai.llm.LLM._prepare_completion_params")
+    @patch("crewai.llm.LLM._ahandle_non_streaming_response")
+    async def test_acall_retries_once_then_raises_on_persistent_stop_error(
+        self, mock_handle, mock_prepare
+    ):
+        """Same infinite recursion guard for the async acall() method."""
+        llm = self._make_llm(stop=["stop_token"])
+        mock_prepare.return_value = {"messages": [{"role": "user", "content": "hi"}]}
+        mock_handle.side_effect = Exception(
+            "Unsupported parameter: 'stop' is not supported"
+        )
+
+        with pytest.raises(Exception, match="Unsupported parameter"):
+            await llm.acall("test")
+
+        assert mock_handle.call_count == 2
+
+    @pytest.mark.asyncio
+    @patch("crewai.llm.LLM._prepare_completion_params")
+    @patch("crewai.llm.LLM._ahandle_non_streaming_response")
+    async def test_acall_preserves_existing_additional_params(
+        self, mock_handle, mock_prepare
+    ):
+        """Async version: existing additional_params must survive the retry."""
+        llm = self._make_llm(stop=["stop_token"])
+        llm.additional_params = {"seed": 42}
+        mock_prepare.return_value = {"messages": [{"role": "user", "content": "hi"}]}
+
+        call_count = 0
+
+        async def side_effect(*args, **kwargs):
+            nonlocal call_count
+            call_count += 1
+            if call_count == 1:
+                raise Exception("Unsupported parameter: 'stop' is not supported")
+            return "Success"
+
+        mock_handle.side_effect = side_effect
+
+        result = await llm.acall("test")
+
+        assert result == "Success"
+        assert llm.additional_params.get("seed") == 42
+        assert "stop" in llm.additional_params.get("additional_drop_params", [])
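
Reviewer note (not part of the patch): the sketch below distills the guard
pattern this patch applies, so the control flow can be read without tracing
the full diff. It is a minimal model under stated assumptions: FakeLLM,
_complete, and the RuntimeError text are hypothetical stand-ins, not crewai's
actual API, and the simulated provider rejects 'stop' permanently.

import logging


class FakeLLM:
    """Hypothetical stand-in for the patched class, not crewai's real API."""

    def __init__(self, **additional_params):
        self.additional_params = additional_params

    def _complete(self, prompt):
        # Simulate a provider that permanently rejects the 'stop' parameter.
        raise RuntimeError("Unsupported parameter: 'stop' is not supported")

    def call(self, prompt):
        try:
            return self._complete(prompt)
        except RuntimeError as e:
            if "'stop'" not in str(e):
                raise
            drop = self.additional_params.get("additional_drop_params")
            if isinstance(drop, list) and "stop" in drop:
                # Guard: the retry already happened; fail cleanly rather
                # than recursing until RecursionError.
                raise
            if isinstance(drop, list):
                drop.append("stop")  # mutate in place; sibling params survive
            else:
                self.additional_params["additional_drop_params"] = ["stop"]
            logging.info("Retrying LLM call without the unsupported 'stop'")
            return self.call(prompt)  # one retry; the guard stops a second


llm = FakeLLM(seed=42)
try:
    llm.call("hi")
except RuntimeError:
    pass
# One retry happened; 'seed' survived and 'stop' is now in the drop list.
assert llm.additional_params == {"seed": 42, "additional_drop_params": ["stop"]}

The design point mirrors the patch: the drop list is updated through the
existing dict entry (append or keyed assignment) rather than by rebinding
self.additional_params, and the guard turns a would-be infinite recursion
into at most one retry followed by the normal failure path.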