Fix: Enable litellm.drop_params for O-series/GPT-5 model compatibility

O-series (o1, o3) and GPT-5 models only support temperature=1.
Setting litellm.drop_params=True makes litellm silently drop unsupported parameters
instead of raising UnsupportedParamsError.

Fixes temperature=0.01 error for these models in LLM extraction.
This commit is contained in:
unclecode
2026-01-16 09:56:38 +00:00
parent a00da6557b
commit 6090629ee0

View File

@@ -1775,6 +1775,8 @@ def perform_completion_with_backoff(
from litellm import completion
from litellm.exceptions import RateLimitError
import litellm
litellm.drop_params = True # Auto-drop unsupported params (e.g., temperature for O-series/GPT-5)
extra_args = {"temperature": 0.01, "api_key": api_token, "base_url": base_url}
if json_response:
@@ -1864,7 +1866,9 @@ async def aperform_completion_with_backoff(
from litellm import acompletion
from litellm.exceptions import RateLimitError
import litellm
import asyncio
litellm.drop_params = True # Auto-drop unsupported params (e.g., temperature for O-series/GPT-5)
extra_args = {"temperature": 0.01, "api_key": api_token, "base_url": base_url}
if json_response: