# Mirror of https://github.com/BerriAI/litellm.git (synced 2025-12-06 11:33:26 +08:00)
#
# Fix Azure GPT-5 incorrectly routing to O-series config.
# GPT-5 models support reasoning but are NOT O-series models and DO support the
# temperature parameter. The previous routing logic in
# get_provider_responses_api_config() incorrectly sent Azure GPT-5 requests to
# AzureOpenAIOSeriesResponsesAPIConfig, which removes temperature from the
# supported params. The fix excludes GPT-5 models from O-series routing so they
# use the standard AzureOpenAIResponsesAPIConfig, which supports temperature.
#
# Fixes: Azure GPT-5 throwing UnsupportedParamsError for the temperature parameter.
#
# Per feedback from @krrishdholakia, the routing check was broadened from
# is_gpt5 to a general "gpt" substring check so it covers all GPT models
# (gpt-3.5, gpt-4, gpt-5, and future variants) without per-model updates:
# - GPT models (gpt-3.5-turbo, gpt-4, gpt-4o, gpt-5) -> AzureOpenAIResponsesAPIConfig (supports temperature)
# - O-series models (o1, o3) -> AzureOpenAIOSeriesResponsesAPIConfig (no temperature)
#
# Co-authored-by: openhands <openhands@all-hands.dev>
#
# 98 lines | 3.8 KiB | Python
"""
Test that Azure GPT-5 models support the temperature parameter in the Responses API.
"""
|
|
|
|
import pytest
|
|
from litellm.utils import ProviderConfigManager
|
|
from litellm.types.utils import LlmProviders
|
|
|
|
|
|
def test_azure_gpt5_supports_temperature():
    """Azure GPT-5 must route to the base Responses API config, which allows temperature."""
    cfg = ProviderConfigManager.get_provider_responses_api_config(
        provider=LlmProviders.AZURE,
        model="gpt-5",
    )

    # GPT-5 is a reasoning model but NOT O-series, so it must get the base
    # AzureOpenAIResponsesAPIConfig rather than the O-series config (which
    # strips temperature from the supported params).
    assert type(cfg).__name__ == "AzureOpenAIResponsesAPIConfig"

    params = cfg.get_supported_openai_params("gpt-5")
    assert "temperature" in params, "Azure GPT-5 should support temperature parameter"
|
|
|
|
|
|
def test_azure_o_series_does_not_support_temperature():
    """Azure O-series models must still route to the O-series config (no temperature)."""
    for o_model in ["o1", "o1-preview", "o3"]:
        cfg = ProviderConfigManager.get_provider_responses_api_config(
            provider=LlmProviders.AZURE,
            model=o_model,
        )

        # O-series reasoning models keep the dedicated config...
        assert type(cfg).__name__ == "AzureOpenAIOSeriesResponsesAPIConfig", \
            f"Azure {o_model} should use O-series config"

        # ...which removes temperature from the supported OpenAI params.
        params = cfg.get_supported_openai_params(o_model)
        assert "temperature" not in params, \
            f"Azure {o_model} should NOT support temperature parameter"
|
|
|
|
|
|
def test_openai_gpt5_supports_temperature():
    """Sanity check: the plain OpenAI provider also exposes temperature for GPT-5."""
    cfg = ProviderConfigManager.get_provider_responses_api_config(
        provider=LlmProviders.OPENAI,
        model="gpt-5",
    )

    # Non-Azure GPT-5 goes through the standard OpenAI Responses API config.
    assert type(cfg).__name__ == "OpenAIResponsesAPIConfig"

    params = cfg.get_supported_openai_params("gpt-5")
    assert "temperature" in params, "OpenAI GPT-5 should support temperature parameter"
|
|
|
|
|
|
def test_azure_gpt5_variants_support_temperature():
    """Every GPT-5 naming variant must avoid the O-series config and keep temperature."""
    # Covers case differences and the "azure/" provider-prefixed form.
    for variant in ["gpt-5", "gpt-5-turbo", "GPT-5", "azure/gpt-5"]:
        cfg = ProviderConfigManager.get_provider_responses_api_config(
            provider=LlmProviders.AZURE,
            model=variant,
        )

        # Routing must land on the base config regardless of how the
        # GPT-5 model name is spelled.
        assert type(cfg).__name__ == "AzureOpenAIResponsesAPIConfig", \
            f"Model '{variant}' should not use O-series config"

        params = cfg.get_supported_openai_params(variant)
        assert "temperature" in params, \
            f"Model '{variant}' should support temperature parameter"
|
|
|
|
|
|
def test_azure_gpt_models_support_temperature():
    """The broadened 'gpt' routing check must cover every GPT generation, not just GPT-5."""
    for gpt_model in ["gpt-3.5-turbo", "gpt-4", "gpt-4-turbo", "gpt-4o", "gpt-5"]:
        cfg = ProviderConfigManager.get_provider_responses_api_config(
            provider=LlmProviders.AZURE,
            model=gpt_model,
        )

        # No GPT-family model should ever receive the O-series config.
        assert type(cfg).__name__ == "AzureOpenAIResponsesAPIConfig", \
            f"Model '{gpt_model}' should not use O-series config"

        # All GPT models expose temperature in their supported params.
        params = cfg.get_supported_openai_params(gpt_model)
        assert "temperature" in params, \
            f"Model '{gpt_model}' should support temperature parameter"